From 0ac6a364218705681a2c7fce277c410a801b39f0 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 8 Mar 2018 09:10:21 -0700 Subject: [PATCH 01/89] Remove now-unused createParser that uses BytesReference (#28926) This removes `BytesReference` use from XContent and all subclasses. Relates to #28504 --- .../org/elasticsearch/common/xcontent/XContent.java | 10 ---------- .../common/xcontent/cbor/CborXContent.java | 7 ------- .../common/xcontent/json/JsonXContent.java | 7 ------- .../common/xcontent/smile/SmileXContent.java | 7 ------- .../common/xcontent/yaml/YamlXContent.java | 7 ------- 5 files changed, 38 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java index c7118f025ee04..11d019c3291d1 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.bytes.BytesReference; import java.io.IOException; import java.io.InputStream; @@ -104,15 +103,6 @@ XContentParser createParser(NamedXContentRegistry xContentRegistry, XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, byte[] data, int offset, int length) throws IOException; - /** - * Creates a parser over the provided bytes. - * @deprecated use {@link #createParser(NamedXContentRegistry, DeprecationHandler, InputStream)} instead, - * the BytesReference coupling in this class will be removed in a future commit - */ - @Deprecated - XContentParser createParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, BytesReference bytes) throws IOException; - /** * Creates a parser over the provided reader. 
*/ diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java index f05b38fb20e6a..222cf8e98bd32 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -103,12 +102,6 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, return new CborXContentParser(xContentRegistry, deprecationHandler, cborFactory.createParser(data, offset, length)); } - @Override - public XContentParser createParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, BytesReference bytes) throws IOException { - return createParser(xContentRegistry, deprecationHandler, bytes.streamInput()); - } - @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, Reader reader) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java index 7f5174d272266..813aa64d9ffa3 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; -import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -104,12 +103,6 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, return new JsonXContentParser(xContentRegistry, deprecationHandler, jsonFactory.createParser(data, offset, length)); } - @Override - public XContentParser createParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, BytesReference bytes) throws IOException { - return createParser(xContentRegistry, deprecationHandler, bytes.streamInput()); - } - @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, Reader reader) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java index 17de93d87baaf..bbe3542f29c5a 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -104,12 +103,6 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, return new SmileXContentParser(xContentRegistry, deprecationHandler, smileFactory.createParser(data, offset, length)); } - @Override - public XContentParser createParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler 
deprecationHandler, BytesReference bytes) throws IOException { - return createParser(xContentRegistry, deprecationHandler, bytes.streamInput()); - } - @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, Reader reader) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index 3547440eb8b32..c14e87661990f 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -99,12 +98,6 @@ public XContentParser createParser(NamedXContentRegistry xContentRegistry, return new YamlXContentParser(xContentRegistry, deprecationHandler, yamlFactory.createParser(data, offset, length)); } - @Override - public XContentParser createParser(NamedXContentRegistry xContentRegistry, - DeprecationHandler deprecationHandler, BytesReference bytes) throws IOException { - return createParser(xContentRegistry, deprecationHandler, bytes.streamInput()); - } - @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, Reader reader) throws IOException { From 71d861414c49f337439e7bd0ea935b2b4242c381 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 8 Mar 2018 15:32:41 -0700 Subject: [PATCH 02/89] Factor UnknownNamedObjectException into its own class (#28931) * Factor UnknownNamedObjectException into its own 
class This moves the inner class `UnknownNamedObjectException` from `NamedXContentRegistry` into a top-level class. This is so that `NamedXContentRegistry` doesn't have to depend on StreamInput and StreamOutput. Relates to #28504 --- .../elasticsearch/ElasticsearchException.java | 4 +- .../cluster/metadata/MetaData.java | 2 +- .../xcontent/NamedXContentRegistry.java | 49 ------------ .../xcontent/UnknownNamedObjectException.java | 74 +++++++++++++++++++ .../common/xcontent/yaml/YamlXContent.java | 3 +- .../index/query/AbstractQueryBuilder.java | 2 +- .../ExceptionSerializationTests.java | 3 +- .../common/xcontent/BaseXContentTestCase.java | 2 +- .../UnknownNamedObjectExceptionTests.java | 1 - .../xcontent/XContentParserUtilsTests.java | 2 +- 10 files changed, 83 insertions(+), 59 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/xcontent/UnknownNamedObjectException.java diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index b3d737430748e..0326e3d10028d 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -986,8 +986,8 @@ private enum ElasticsearchExceptionHandle { org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_1), SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class, org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2), - UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException.class, - org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException::new, 148, Version.V_5_2_0), + UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.UnknownNamedObjectException.class, + org.elasticsearch.common.xcontent.UnknownNamedObjectException::new, 148, Version.V_5_2_0), 
TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class, MultiBucketConsumerService.TooManyBucketsException::new, 149, Version.V_6_2_0); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 8c6829ca78734..06aa51f612bcc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -43,7 +43,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException; +import org.elasticsearch.common.xcontent.UnknownNamedObjectException; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java b/server/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java index 4fb397dbe1751..c19a667776f2e 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/NamedXContentRegistry.java @@ -23,8 +23,6 @@ import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.ArrayList; @@ -36,7 +34,6 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -import static java.util.Objects.requireNonNull; public class 
NamedXContentRegistry { /** @@ -143,50 +140,4 @@ public T parseNamedObject(Class categoryClass, String name, XContentPa return categoryClass.cast(entry.parser.parse(parser, context)); } - /** - * Thrown when {@link NamedXContentRegistry#parseNamedObject(Class, String, XContentParser, Object)} is called with an unregistered - * name. When this bubbles up to the rest layer it is converted into a response with {@code 400 BAD REQUEST} status. - */ - public static class UnknownNamedObjectException extends ParsingException { - private final String categoryClass; - private final String name; - - public UnknownNamedObjectException(XContentLocation contentLocation, Class categoryClass, - String name) { - super(contentLocation, "Unknown " + categoryClass.getSimpleName() + " [" + name + "]"); - this.categoryClass = requireNonNull(categoryClass, "categoryClass is required").getName(); - this.name = requireNonNull(name, "name is required"); - } - - /** - * Read from a stream. - */ - public UnknownNamedObjectException(StreamInput in) throws IOException { - super(in); - categoryClass = in.readString(); - name = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(categoryClass); - out.writeString(name); - } - - /** - * Category class that was missing a parser. This is a String instead of a class because the class might not be on the classpath - * of all nodes or it might be exclusive to a plugin or something. - */ - public String getCategoryClass() { - return categoryClass; - } - - /** - * Name of the missing parser. 
- */ - public String getName() { - return name; - } - } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/UnknownNamedObjectException.java b/server/src/main/java/org/elasticsearch/common/xcontent/UnknownNamedObjectException.java new file mode 100644 index 0000000000000..0475ab334d388 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/xcontent/UnknownNamedObjectException.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +import static java.util.Objects.requireNonNull; + +/** + * Thrown when {@link NamedXContentRegistry#parseNamedObject(Class, String, XContentParser, Object)} is called with an unregistered + * name. When this bubbles up to the rest layer it is converted into a response with {@code 400 BAD REQUEST} status. 
+ */ +public class UnknownNamedObjectException extends ParsingException { + private final String categoryClass; + private final String name; + + public UnknownNamedObjectException(XContentLocation contentLocation, Class categoryClass, String name) { + super(contentLocation, "Unknown " + categoryClass.getSimpleName() + " [" + name + "]"); + this.categoryClass = requireNonNull(categoryClass, "categoryClass is required").getName(); + this.name = requireNonNull(name, "name is required"); + } + + /** + * Read from a stream. + */ + public UnknownNamedObjectException(StreamInput in) throws IOException { + super(in); + categoryClass = in.readString(); + name = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(categoryClass); + out.writeString(name); + } + + /** + * Category class that was missing a parser. This is a String instead of a class because the class might not be on the classpath + * of all nodes or it might be exclusive to a plugin or something. + */ + public String getCategoryClass() { + return categoryClass; + } + + /** + * Name of the missing parser. 
+ */ + public String getName() { + return name; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index c14e87661990f..3c466e59093be 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -66,7 +65,7 @@ public XContentType type() { @Override public byte streamSeparator() { - throw new ElasticsearchParseException("yaml does not support stream parsing..."); + throw new UnsupportedOperationException("yaml does not support stream parsing..."); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index d1337d5258aa9..d272bb29fbfa6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.AbstractObjectParser; -import org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException; +import org.elasticsearch.common.xcontent.UnknownNamedObjectException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; import 
org.elasticsearch.common.xcontent.XContentParser; diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index b229fa21ebfac..9fc410233f010 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CancellableThreadsTests; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.UnknownNamedObjectException; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.ShardLockObtainFailedException; @@ -813,7 +814,7 @@ public void testIds() { ids.put(145, org.elasticsearch.ElasticsearchStatusException.class); ids.put(146, org.elasticsearch.tasks.TaskCancelledException.class); ids.put(147, org.elasticsearch.env.ShardLockObtainFailedException.class); - ids.put(148, org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException.class); + ids.put(148, UnknownNamedObjectException.class); ids.put(149, MultiBucketConsumerService.TooManyBucketsException.class); Map, Integer> reverse = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 609c12fb6d874..dbb47764158c9 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -1023,7 +1023,7 @@ public void testNamedObject() throws IOException { { p.nextToken(); assertEquals("test", p.namedObject(Object.class, "str", null)); - NamedXContentRegistry.UnknownNamedObjectException e = 
expectThrows(NamedXContentRegistry.UnknownNamedObjectException.class, + UnknownNamedObjectException e = expectThrows(UnknownNamedObjectException.class, () -> p.namedObject(Object.class, "unknown", null)); assertEquals("Unknown Object [unknown]", e.getMessage()); assertEquals("java.lang.Object", e.getCategoryClass()); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java index 4fcc16416b56f..c623e4a196b50 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java index f550e26024d06..e31a1ce72025c 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java @@ -187,7 +187,7 @@ public void testParseTypedKeysObject() throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); - NamedXContentRegistry.UnknownNamedObjectException e = expectThrows(NamedXContentRegistry.UnknownNamedObjectException.class, + 
UnknownNamedObjectException e = expectThrows(UnknownNamedObjectException.class, () -> parseTypedKeysObject(parser, delimiter, Boolean.class, a -> {})); assertEquals("Unknown Boolean [type]", e.getMessage()); assertEquals("type", e.getName()); From 509abb7f2abf0f29dd370aecd25db967ca203798 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 8 Mar 2018 15:45:51 -0700 Subject: [PATCH 03/89] Switch XContentBuilder from BytesStreamOutput to ByteArrayOutputStream (#28945) This switches the underlying byte output representation used by default in `XContentBuilder` from `BytesStreamOutput` to a `ByteArrayOutputStream` (an `OutputStream` can still be specified manually) This is groundwork to allow us to decouple `XContent*` from the rest of the ES core code so that it may be factored into a separate jar. Since `BytesStreamOutput` was not using the recycling instance of `BigArrays`, this should not affect the circuit breaking capabilities elsewhere in the system. Relates to #28504 --- .../common/xcontent/XContentBuilder.java | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index e673c2a4b7ca2..16f0ac83a849f 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -20,10 +20,10 @@ package org.elasticsearch.common.xcontent; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.BytesStream; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.ByteSizeValue; @@ 
-34,6 +34,7 @@ import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; +import java.io.ByteArrayOutputStream; import java.io.Flushable; import java.io.IOException; import java.io.InputStream; @@ -58,7 +59,7 @@ public final class XContentBuilder implements Releasable, Flushable { /** * Create a new {@link XContentBuilder} using the given {@link XContent} content. *

- * The builder uses an internal {@link BytesStreamOutput} output stream to build the content. + * The builder uses an internal {@link ByteArrayOutputStream} output stream to build the content. *

* * @param xContent the {@link XContent} @@ -66,13 +67,13 @@ public final class XContentBuilder implements Releasable, Flushable { * @throws IOException if an {@link IOException} occurs while building the content */ public static XContentBuilder builder(XContent xContent) throws IOException { - return new XContentBuilder(xContent, new BytesStreamOutput()); + return new XContentBuilder(xContent, new ByteArrayOutputStream()); } /** * Create a new {@link XContentBuilder} using the given {@link XContent} content and some inclusive and/or exclusive filters. *

- * The builder uses an internal {@link BytesStreamOutput} output stream to build the content. When both exclusive and + * The builder uses an internal {@link ByteArrayOutputStream} output stream to build the content. When both exclusive and * inclusive filters are provided, the underlying builder will first use exclusion filters to remove fields and then will check the * remaining fields against the inclusive filters. *

@@ -83,7 +84,7 @@ public static XContentBuilder builder(XContent xContent) throws IOException { * @throws IOException if an {@link IOException} occurs while building the content */ public static XContentBuilder builder(XContent xContent, Set includes, Set excludes) throws IOException { - return new XContentBuilder(xContent, new BytesStreamOutput(), includes, excludes); + return new XContentBuilder(xContent, new ByteArrayOutputStream(), includes, excludes); } public static final DateTimeFormatter DEFAULT_DATE_PRINTER = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC); @@ -1036,7 +1037,11 @@ public XContentGenerator generator() { public BytesReference bytes() { close(); - return ((BytesStream) bos).bytes(); + if (bos instanceof ByteArrayOutputStream) { + return new BytesArray(((ByteArrayOutputStream) bos).toByteArray()); + } else { + return ((BytesStream) bos).bytes(); + } } /** From e1141b1ba82dc7c12872227c63e0314137760ee8 Mon Sep 17 00:00:00 2001 From: Spencer Date: Thu, 8 Mar 2018 16:05:56 -0700 Subject: [PATCH 04/89] [rest-api-spec] update doc link for /_rank_eval --- .../src/main/resources/rest-api-spec/api/rank_eval.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json index 37ff11f876470..51798c92babf6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json @@ -1,6 +1,6 @@ { "rank_eval": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-rank-eval.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html", "methods": ["POST"], "url": { "path": "/_rank_eval", From 11e4667892bf30217768101958e7baea7ababc82 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 8 Mar 2018 15:21:45 -0800 Subject: [PATCH 05/89] Build: Fix 
ability to ignore when no tests are run (#28930) Running any randomized testing task within Elasticsearch currently fails if a project has zero tests. This was supposed to be overrideable, but it was always set to 'fail', and the system property to override was passed down to the test runner, but never read there. This commit changes the value of the ifNoTests setting to randomized runner to be read from system properties and continue to default to 'fail'. --- .../main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 72018368a0fe6..b72d5696af720 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -558,7 +558,7 @@ class BuildPlugin implements Plugin { return { jvm "${project.runtimeJavaHome}/bin/java" parallelism System.getProperty('tests.jvms', 'auto') - ifNoTests 'fail' + ifNoTests System.getProperty('tests.ifNoTests', 'fail') onNonEmptyWorkDirectory 'wipe' leaveTemporary true @@ -582,8 +582,6 @@ class BuildPlugin implements Plugin { systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' systemProperty 'jna.nosys', 'true' - // default test sysprop values - systemProperty 'tests.ifNoTests', 'fail' // TODO: remove setting logging level via system property systemProperty 'tests.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { From 66dd25b1c97660284555944b148eec11ccd86759 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 8 Mar 2018 15:22:59 -0800 Subject: [PATCH 06/89] Continue registering pipelines after one pipeline parse failure. (#28752) Ingest has been failing to apply existing pipelines from cluster-state into the in-memory representation that are no longer valid. 
One example of this is a pipeline with a script processor. If a cluster starts up with scripting disabled, these pipelines will not be loaded. Even though GETing a pipeline worked, indexing operations claimed that this pipeline did not exist. This is because one gets information from cluster-state and the other is from an in-memory data-structure. Now, two things happen 1. suppress the exceptions until after other successful pipelines are loaded 2. replace failed pipelines with a placeholder pipeline If the pipeline execution service encounters the stubbed pipeline, it is known that something went wrong at the time of pipeline creation and an exception was thrown to the user at some point at start-up. closes #28269. --- .../ingest/common/IngestRestartIT.java | 65 +++++++++++++++++++ .../elasticsearch/ingest/IngestService.java | 4 -- .../elasticsearch/ingest/PipelineStore.java | 29 ++++++++- .../elasticsearch/ingest/IngestClientIT.java | 15 +++-- ...gestProcessorNotInstalledOnAllNodesIT.java | 7 +- .../ingest/PipelineExecutionServiceTests.java | 27 ++++++++ .../ingest/PipelineStoreTests.java | 9 ++- 7 files changed, 142 insertions(+), 14 deletions(-) diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java index c62a8fd237148..a8ca20485c451 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java @@ -18,10 +18,14 @@ */ package org.elasticsearch.ingest.common; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; @@ -33,6 +37,7 @@ import java.util.Collection; import java.util.Collections; import java.util.Map; +import java.util.function.Consumer; import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; @@ -64,6 +69,66 @@ protected Map, Object>> pluginScripts() { } } + public void testScriptDisabled() throws Exception { + String pipelineIdWithoutScript = randomAlphaOfLengthBetween(5, 10); + String pipelineIdWithScript = pipelineIdWithoutScript + "_script"; + internalCluster().startNode(); + + BytesReference pipelineWithScript = new BytesArray("{\n" + + " \"processors\" : [\n" + + " {\"script\" : {\"lang\": \"" + MockScriptEngine.NAME + "\", \"source\": \"my_script\"}}\n" + + " ]\n" + + "}"); + BytesReference pipelineWithoutScript = new BytesArray("{\n" + + " \"processors\" : [\n" + + " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" + + " ]\n" + + "}"); + + Consumer checkPipelineExists = (id) -> assertThat(client().admin().cluster().prepareGetPipeline(id) + .get().pipelines().get(0).getId(), equalTo(id)); + + client().admin().cluster().preparePutPipeline(pipelineIdWithScript, pipelineWithScript, XContentType.JSON).get(); + client().admin().cluster().preparePutPipeline(pipelineIdWithoutScript, pipelineWithoutScript, XContentType.JSON).get(); + + checkPipelineExists.accept(pipelineIdWithScript); + checkPipelineExists.accept(pipelineIdWithoutScript); + + + internalCluster().stopCurrentMasterNode(); + internalCluster().startNode(Settings.builder().put("script.allowed_types", "none")); + + checkPipelineExists.accept(pipelineIdWithoutScript); + checkPipelineExists.accept(pipelineIdWithScript); + + client().prepareIndex("index", "doc", "1") + .setSource("x", 0) + .setPipeline(pipelineIdWithoutScript) + 
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> client().prepareIndex("index", "doc", "2") + .setSource("x", 0) + .setPipeline(pipelineIdWithScript) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get()); + assertThat(exception.getHeaderKeys(), equalTo(Sets.newHashSet("processor_type"))); + assertThat(exception.getHeader("processor_type"), equalTo(Arrays.asList("unknown"))); + assertThat(exception.getRootCause().getMessage(), + equalTo("pipeline with id [" + pipelineIdWithScript + "] could not be loaded, caused by " + + "[ElasticsearchParseException[Error updating pipeline with id [" + pipelineIdWithScript + "]]; " + + "nested: ElasticsearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " + + "nested: IllegalArgumentException[cannot execute [inline] scripts];; " + + "ElasticsearchException[java.lang.IllegalArgumentException: cannot execute [inline] scripts]; " + + "nested: IllegalArgumentException[cannot execute [inline] scripts];; java.lang.IllegalArgumentException: " + + "cannot execute [inline] scripts]")); + + Map source = client().prepareGet("index", "doc", "1").get().getSource(); + assertThat(source.get("x"), equalTo(0)); + assertThat(source.get("y"), equalTo(0)); + } + public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exception { internalCluster().startNode(); diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 4a018ca025896..ad2b8643f7ae3 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -25,8 +25,6 @@ import java.util.List; import java.util.Map; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.AnalysisRegistry; @@ -34,8 +32,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; -import static org.elasticsearch.common.settings.Setting.Property; - /** * Holder class for several ingest related services. */ diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java index 21372e46e5f3d..c6dce0bd45b3c 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -81,16 +81,41 @@ void innerUpdatePipelines(ClusterState previousState, ClusterState state) { } Map pipelines = new HashMap<>(); + List exceptions = new ArrayList<>(); for (PipelineConfiguration pipeline : ingestMetadata.getPipelines().values()) { try { pipelines.put(pipeline.getId(), factory.create(pipeline.getId(), pipeline.getConfigAsMap(), processorFactories)); } catch (ElasticsearchParseException e) { - throw e; + pipelines.put(pipeline.getId(), substitutePipeline(pipeline.getId(), e)); + exceptions.add(e); } catch (Exception e) { - throw new ElasticsearchParseException("Error updating pipeline with id [" + pipeline.getId() + "]", e); + ElasticsearchParseException parseException = new ElasticsearchParseException( + "Error updating pipeline with id [" + pipeline.getId() + "]", e); + pipelines.put(pipeline.getId(), substitutePipeline(pipeline.getId(), parseException)); + exceptions.add(parseException); } } this.pipelines = Collections.unmodifiableMap(pipelines); + ExceptionsHelper.rethrowAndSuppress(exceptions); + } + + private Pipeline substitutePipeline(String id, ElasticsearchParseException e) { + String tag = e.getHeaderKeys().contains("processor_tag") ? 
e.getHeader("processor_tag").get(0) : null; + String type = e.getHeaderKeys().contains("processor_type") ? e.getHeader("processor_type").get(0) : "unknown"; + String errorMessage = "pipeline with id [" + id + "] could not be loaded, caused by [" + e.getDetailedMessage() + "]"; + Processor failureProcessor = new AbstractProcessor(tag) { + @Override + public void execute(IngestDocument ingestDocument) { + throw new IllegalStateException(errorMessage); + } + + @Override + public String getType() { + return type; + } + }; + String description = "this is a place holder pipeline, because pipeline with id [" + id + "] could not be loaded"; + return new Pipeline(id, description, null, new CompoundProcessor(failureProcessor)); } /** diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index dbbc8e443c076..809a81b687e80 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -36,16 +36,12 @@ import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; -import org.elasticsearch.action.support.replication.TransportReplicationActionTests; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Requests; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -130,6 +126,10 @@ public void testSimulate() throws Exception { IngestDocument ingestDocument = new IngestDocument("index", 
"type", "id", null, null, null, null, source); assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); + + // cleanup + WritePipelineResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get(); + assertTrue(deletePipelineResponse.isAcknowledged()); } public void testBulkWithIngestFailures() throws Exception { @@ -172,6 +172,10 @@ public void testBulkWithIngestFailures() throws Exception { assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } } + + // cleanup + WritePipelineResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get(); + assertTrue(deletePipelineResponse.isAcknowledged()); } public void testBulkWithUpsert() throws Exception { @@ -271,5 +275,8 @@ public void testPutWithPipelineFactoryError() throws Exception { assertNotNull(ex); assertThat(ex.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]")); } + + GetPipelineResponse response = client().admin().cluster().prepareGetPipeline("_id").get(); + assertFalse(response.isFound()); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java index 645933348879c..03777b98ab73e 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java @@ -37,7 +37,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(numDataNodes = 0, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) public 
class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase { @@ -104,7 +103,11 @@ public void testFailStartNode() throws Exception { installPlugin = false; String node2 = internalCluster().startNode(); pipeline = internalCluster().getInstance(NodeService.class, node2).getIngestService().getPipelineStore().get("_id"); - assertThat(pipeline, nullValue()); + + assertNotNull(pipeline); + assertThat(pipeline.getId(), equalTo("_id")); + assertThat(pipeline.getDescription(), equalTo("this is a place holder pipeline, " + + "because pipeline with id [_id] could not be loaded")); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 3247761a548f0..5a3b57a6d7e0b 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; @@ -92,6 +93,32 @@ public void testExecuteIndexPipelineDoesNotExist() { verify(completionHandler, never()).accept(anyBoolean()); } + public void testExecuteIndexPipelineExistsButFailedParsing() { + when(store.get("_id")).thenReturn(new Pipeline("_id", "stub", null, + new CompoundProcessor(new AbstractProcessor("mock") { + @Override + public void execute(IngestDocument ingestDocument) { + throw new IllegalStateException("error"); + } + + @Override + public String getType() { + return null; + } + }))); + SetOnce failed = new SetOnce<>(); + IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); + Consumer failureHandler = (e) -> { + assertThat(e.getCause().getClass(), 
equalTo(IllegalArgumentException.class)); + assertThat(e.getCause().getCause().getClass(), equalTo(IllegalStateException.class)); + assertThat(e.getCause().getCause().getMessage(), equalTo("error")); + failed.set(true); + }; + Consumer completionHandler = (e) -> failed.set(false); + executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); + assertTrue(failed.get()); + } + public void testExecuteBulkPipelineDoesNotExist() { CompoundProcessor processor = mock(CompoundProcessor.class); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, processor)); diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java index bb0d57871208c..250bb5059cf58 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; @@ -165,7 +164,13 @@ public void testPutWithErrorResponse() { assertThat(e.getMessage(), equalTo("[processors] required property is missing")); } pipeline = store.get(id); - assertThat(pipeline, nullValue()); + assertNotNull(pipeline); + assertThat(pipeline.getId(), equalTo("_id")); + assertThat(pipeline.getDescription(), equalTo("this is a place holder pipeline, because pipeline with" + + " id [_id] could not be loaded")); + assertThat(pipeline.getProcessors().size(), equalTo(1)); + assertNull(pipeline.getProcessors().get(0).getTag()); + assertThat(pipeline.getProcessors().get(0).getType(), equalTo("unknown")); } public void testDelete() { From 
7396fba0b4913cdd74ee07f556f18876d31a9ed8 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 8 Mar 2018 17:05:11 -0700 Subject: [PATCH 07/89] Remove FastCharArrayReader and FastCharArrayWriter (#28951) These classes are used only in two places, and can be replaced by the `CharArrayReader` and `CharArrayWriter`. The JDK can also perform lock biasing and elision as well as escape analysis to optimize away non-contended locks, rendering their lock-free implementations unnecessary. --- .../common/io/FastCharArrayReader.java | 225 -------------- .../common/io/FastCharArrayWriter.java | 277 ------------------ .../phrase/DirectCandidateGenerator.java | 4 +- .../phrase/NoisyChannelSpellChecker.java | 4 +- 4 files changed, 4 insertions(+), 506 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/common/io/FastCharArrayReader.java delete mode 100644 server/src/main/java/org/elasticsearch/common/io/FastCharArrayWriter.java diff --git a/server/src/main/java/org/elasticsearch/common/io/FastCharArrayReader.java b/server/src/main/java/org/elasticsearch/common/io/FastCharArrayReader.java deleted file mode 100644 index f75d8d1c96a63..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/io/FastCharArrayReader.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.io; - -import java.io.IOException; -import java.io.Reader; - -public class FastCharArrayReader extends Reader { - - /** - * The character buffer. - */ - protected char buf[]; - - /** - * The current buffer position. - */ - protected int pos; - - /** - * The position of mark in buffer. - */ - protected int markedPos = 0; - - /** - * The index of the end of this buffer. There is not valid - * data at or beyond this index. - */ - protected int count; - - /** - * Creates a CharArrayReader from the specified array of chars. - * - * @param buf Input buffer (not copied) - */ - public FastCharArrayReader(char buf[]) { - this.buf = buf; - this.pos = 0; - this.count = buf.length; - } - - /** - * Creates a CharArrayReader from the specified array of chars. - *

- * The resulting reader will start reading at the given - * offset. The total number of char values that can be - * read from this reader will be either length or - * buf.length-offset, whichever is smaller. - * - * @param buf Input buffer (not copied) - * @param offset Offset of the first char to read - * @param length Number of chars to read - * @throws IllegalArgumentException If offset is negative or greater than - * buf.length, or if length is negative, or if - * the sum of these two values is negative. - */ - public FastCharArrayReader(char buf[], int offset, int length) { - if ((offset < 0) || (offset > buf.length) || (length < 0) || - ((offset + length) < 0)) { - throw new IllegalArgumentException(); - } - this.buf = buf; - this.pos = offset; - this.count = Math.min(offset + length, buf.length); - this.markedPos = offset; - } - - /** - * Checks to make sure that the stream has not been closed - */ - private void ensureOpen() throws IOException { - if (buf == null) - throw new IOException("Stream closed"); - } - - /** - * Reads a single character. - * - * @throws IOException If an I/O error occurs - */ - @Override - public int read() throws IOException { - ensureOpen(); - if (pos >= count) - return -1; - else - return buf[pos++]; - } - - /** - * Reads characters into a portion of an array. 
- * - * @param b Destination buffer - * @param off Offset at which to start storing characters - * @param len Maximum number of characters to read - * @return The actual number of characters read, or -1 if - * the end of the stream has been reached - * @throws IOException If an I/O error occurs - */ - @Override - public int read(char b[], int off, int len) throws IOException { - ensureOpen(); - if ((off < 0) || (off > b.length) || (len < 0) || - ((off + len) > b.length) || ((off + len) < 0)) { - throw new IndexOutOfBoundsException(); - } else if (len == 0) { - return 0; - } - - if (pos >= count) { - return -1; - } - if (pos + len > count) { - len = count - pos; - } - if (len <= 0) { - return 0; - } - System.arraycopy(buf, pos, b, off, len); - pos += len; - return len; - } - - /** - * Skips characters. Returns the number of characters that were skipped. - *

- * The n parameter may be negative, even though the - * skip method of the {@link Reader} superclass throws - * an exception in this case. If n is negative, then - * this method does nothing and returns 0. - * - * @param n The number of characters to skip - * @return The number of characters actually skipped - * @throws IOException If the stream is closed, or an I/O error occurs - */ - @Override - public long skip(long n) throws IOException { - ensureOpen(); - if (pos + n > count) { - n = count - pos; - } - if (n < 0) { - return 0; - } - pos += n; - return n; - } - - /** - * Tells whether this stream is ready to be read. Character-array readers - * are always ready to be read. - * - * @throws IOException If an I/O error occurs - */ - @Override - public boolean ready() throws IOException { - ensureOpen(); - return (count - pos) > 0; - } - - /** - * Tells whether this stream supports the mark() operation, which it does. - */ - @Override - public boolean markSupported() { - return true; - } - - /** - * Marks the present position in the stream. Subsequent calls to reset() - * will reposition the stream to this point. - * - * @param readAheadLimit Limit on the number of characters that may be - * read while still preserving the mark. Because - * the stream's input comes from a character array, - * there is no actual limit; hence this argument is - * ignored. - * @throws IOException If an I/O error occurs - */ - @Override - public void mark(int readAheadLimit) throws IOException { - ensureOpen(); - markedPos = pos; - } - - /** - * Resets the stream to the most recent mark, or to the beginning if it has - * never been marked. - * - * @throws IOException If an I/O error occurs - */ - @Override - public void reset() throws IOException { - ensureOpen(); - pos = markedPos; - } - - /** - * Closes the stream and releases any system resources associated with - * it. 
Once the stream has been closed, further read(), ready(), - * mark(), reset(), or skip() invocations will throw an IOException. - * Closing a previously closed stream has no effect. - */ - @Override - public void close() { - buf = null; - } -} diff --git a/server/src/main/java/org/elasticsearch/common/io/FastCharArrayWriter.java b/server/src/main/java/org/elasticsearch/common/io/FastCharArrayWriter.java deleted file mode 100644 index 87313eae7f938..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/io/FastCharArrayWriter.java +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.io; - -import java.io.IOException; -import java.io.Writer; -import java.util.Arrays; - -/** - * A similar class to {@link java.io.CharArrayWriter} allowing to get the underlying char[] buffer. - */ -public class FastCharArrayWriter extends Writer { - - /** - * The buffer where data is stored. - */ - protected char buf[]; - - /** - * The number of chars in the buffer. - */ - protected int count; - - /** - * Creates a new CharArrayWriter. - */ - public FastCharArrayWriter() { - this(32); - } - - /** - * Creates a new CharArrayWriter with the specified initial size. 
- * - * @param initialSize an int specifying the initial buffer size. - * @throws IllegalArgumentException if initialSize is negative - */ - public FastCharArrayWriter(int initialSize) { - if (initialSize < 0) { - throw new IllegalArgumentException("Negative initial size: " - + initialSize); - } - buf = new char[initialSize]; - } - - /** - * Writes a character to the buffer. - */ - @Override - public void write(int c) { - int newcount = count + 1; - if (newcount > buf.length) { - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); - } - buf[count] = (char) c; - count = newcount; - } - - /** - * Writes characters to the buffer. - * - * @param c the data to be written - * @param off the start offset in the data - * @param len the number of chars that are written - */ - @Override - public void write(char c[], int off, int len) { - if ((off < 0) || (off > c.length) || (len < 0) || - ((off + len) > c.length) || ((off + len) < 0)) { - throw new IndexOutOfBoundsException(); - } else if (len == 0) { - return; - } - int newcount = count + len; - if (newcount > buf.length) { - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); - } - System.arraycopy(c, off, buf, count, len); - count = newcount; - } - - /** - * Write a portion of a string to the buffer. - * - * @param str String to be written from - * @param off Offset from which to start reading characters - * @param len Number of characters to be written - */ - @Override - public void write(String str, int off, int len) { - int newcount = count + len; - if (newcount > buf.length) { - buf = Arrays.copyOf(buf, Math.max(buf.length << 1, newcount)); - } - str.getChars(off, off + len, buf, count); - count = newcount; - } - - /** - * Writes the contents of the buffer to another character stream. - * - * @param out the output stream to write to - * @throws java.io.IOException If an I/O error occurs. 
- */ - public void writeTo(Writer out) throws IOException { - out.write(buf, 0, count); - } - - /** - * Appends the specified character sequence to this writer. - *

- * An invocation of this method of the form out.append(csq) - * behaves in exactly the same way as the invocation - *

-     *     out.write(csq.toString()) 
- * - *

Depending on the specification of toString for the - * character sequence csq, the entire sequence may not be - * appended. For instance, invoking the toString method of a - * character buffer will return a subsequence whose content depends upon - * the buffer's position and limit. - * - * @param csq The character sequence to append. If csq is - * null, then the four characters "null" are - * appended to this writer. - * @return This writer - * @since 1.5 - */ - @Override - public FastCharArrayWriter append(CharSequence csq) { - String s = (csq == null ? "null" : csq.toString()); - write(s, 0, s.length()); - return this; - } - - /** - * Appends a subsequence of the specified character sequence to this writer. - *

- * An invocation of this method of the form out.append(csq, start, - * end) when csq is not null, behaves in - * exactly the same way as the invocation - *

-     *     out.write(csq.subSequence(start, end).toString()) 
- * - * @param csq The character sequence from which a subsequence will be - * appended. If csq is null, then characters - * will be appended as if csq contained the four - * characters "null". - * @param start The index of the first character in the subsequence - * @param end The index of the character following the last character in the - * subsequence - * @return This writer - * @throws IndexOutOfBoundsException If start or end are negative, start - * is greater than end, or end is greater than - * csq.length() - * @since 1.5 - */ - @Override - public FastCharArrayWriter append(CharSequence csq, int start, int end) { - String s = (csq == null ? "null" : csq).subSequence(start, end).toString(); - write(s, 0, s.length()); - return this; - } - - /** - * Appends the specified character to this writer. - *

- * An invocation of this method of the form out.append(c) - * behaves in exactly the same way as the invocation - *

-     *     out.write(c) 
- * - * @param c The 16-bit character to append - * @return This writer - * @since 1.5 - */ - @Override - public FastCharArrayWriter append(char c) { - write(c); - return this; - } - - /** - * Resets the buffer so that you can use it again without - * throwing away the already allocated buffer. - */ - public void reset() { - count = 0; - } - - /** - * Returns a copy of the input data. - * - * @return an array of chars copied from the input data. - */ - public char toCharArray()[] { - return Arrays.copyOf(buf, count); - } - - /** - * Returns the underlying char array. Note, use {@link #size()} in order to know the size of - * of the actual content within the array. - */ - public char[] unsafeCharArray() { - return buf; - } - - /** - * Returns the current size of the buffer. - * - * @return an int representing the current size of the buffer. - */ - public int size() { - return count; - } - - /** - * Converts input data to a string. - * - * @return the string. - */ - @Override - public String toString() { - return new String(buf, 0, count); - } - - /** - * Converts the input data to a string with trimmed whitespaces. - */ - public String toStringTrim() { - int st = 0; - int len = count; - char[] val = buf; /* avoid getfield opcode */ - - while ((st < len) && (val[st] <= ' ')) { - st++; - len--; - } - while ((st < len) && (val[len - 1] <= ' ')) { - len--; - } - return new String(buf, st, len); - } - - /** - * Flush the stream. - */ - @Override - public void flush() { - } - - /** - * Close the stream. This method does not release the buffer, since its - * contents might still be required. Note: Invoking this method in this class - * will have no effect. 
- */ - @Override - public void close() { - } - -} diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index b874c3aeca311..4d6fceba869ff 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -36,8 +36,8 @@ import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.io.FastCharArrayReader; +import java.io.CharArrayReader; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -314,7 +314,7 @@ public static int analyze(Analyzer analyzer, BytesRef toAnalyze, String field, T spare.copyUTF8Bytes(toAnalyze); CharsRef charsRef = spare.get(); try (TokenStream ts = analyzer.tokenStream( - field, new FastCharArrayReader(charsRef.chars, charsRef.offset, charsRef.length))) { + field, new CharArrayReader(charsRef.chars, charsRef.offset, charsRef.length))) { return analyze(ts, consumer); } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index e6e1767386061..eb9694c6039b7 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -27,10 +27,10 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.io.FastCharArrayReader; import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate; import 
org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet; +import java.io.CharArrayReader; import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -139,7 +139,7 @@ public Result getCorrections(Analyzer analyzer, BytesRef query, CandidateGenerat public TokenStream tokenStream(Analyzer analyzer, BytesRef query, CharsRefBuilder spare, String field) throws IOException { spare.copyUTF8Bytes(query); - return analyzer.tokenStream(field, new FastCharArrayReader(spare.chars(), 0, spare.length())); + return analyzer.tokenStream(field, new CharArrayReader(spare.chars(), 0, spare.length())); } public static class Result { From 18be4c0928db820f622c0225dadeef164ca125db Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 8 Mar 2018 17:17:36 -0700 Subject: [PATCH 08/89] Remove FastStringReader in favor of vanilla StringReader (#28944) This allows us to remove another dependency in the decoupling of the XContent code. Rather than move this class over or decouple it, it can simply be removed. 
Relates tangentially to #28504 --- .../forbidden/es-server-signatures.txt | 2 - .../script/mustache/MustacheScriptEngine.java | 4 +- .../analyze/TransportAnalyzeAction.java | 8 +- .../org/elasticsearch/common/Strings.java | 4 +- .../common/geo/parsers/GeoWKTParser.java | 4 +- .../common/io/FastStringReader.java | 208 ------------------ .../lucene/search/MoreLikeThisQuery.java | 4 +- .../common/lucene/search/XMoreLikeThis.java | 4 +- .../common/xcontent/cbor/CborXContent.java | 4 +- .../common/xcontent/json/JsonXContent.java | 4 +- .../common/xcontent/smile/SmileXContent.java | 4 +- .../common/xcontent/yaml/YamlXContent.java | 4 +- .../SynonymGraphTokenFilterFactory.java | 1 - .../analysis/SynonymTokenFilterFactory.java | 4 +- 14 files changed, 24 insertions(+), 235 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/common/io/FastStringReader.java diff --git a/buildSrc/src/main/resources/forbidden/es-server-signatures.txt b/buildSrc/src/main/resources/forbidden/es-server-signatures.txt index 89179350174a6..9db17aaac0e93 100644 --- a/buildSrc/src/main/resources/forbidden/es-server-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-server-signatures.txt @@ -29,8 +29,6 @@ java.util.concurrent.Executors#privilegedThreadFactory() java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars -java.io.StringReader#(java.lang.String) @ Use FastStringReader instead - @defaultMessage Reference management is tricky, leave it to SearcherManager org.apache.lucene.index.IndexReader#decRef() org.apache.lucene.index.IndexReader#incRef() diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java index 16081b3dd1b12..5a0b2e15460c5 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java @@ -21,11 +21,11 @@ import com.github.mustachejava.Mustache; import com.github.mustachejava.MustacheFactory; +import java.io.StringReader; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; @@ -65,7 +65,7 @@ public T compile(String templateName, String templateSource, ScriptContext new MustacheExecutableScript(template, params); return context.factoryClazz.cast(compiled); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index dd8a381252db0..0b791cb78506a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; @@ -66,6 +65,7 @@ import java.io.IOException; import java.io.Reader; +import java.io.StringReader; import java.util.ArrayList; import 
java.util.HashSet; import java.util.List; @@ -315,12 +315,12 @@ private static DetailAnalyzeResponse detailAnalyze(AnalyzeRequest request, Analy for (int textIndex = 0; textIndex < request.text().length; textIndex++) { String charFilteredSource = request.text()[textIndex]; - Reader reader = new FastStringReader(charFilteredSource); + Reader reader = new StringReader(charFilteredSource); if (charFilterFactories != null) { for (int charFilterIndex = 0; charFilterIndex < charFilterFactories.length; charFilterIndex++) { reader = charFilterFactories[charFilterIndex].create(reader); - Reader readerForWriteOut = new FastStringReader(charFilteredSource); + Reader readerForWriteOut = new StringReader(charFilteredSource); readerForWriteOut = charFilterFactories[charFilterIndex].create(readerForWriteOut); charFilteredSource = writeCharStream(readerForWriteOut); charFiltersTexts[charFilterIndex][textIndex] = charFilteredSource; @@ -380,7 +380,7 @@ private static DetailAnalyzeResponse detailAnalyze(AnalyzeRequest request, Analy } private static TokenStream createStackedTokenStream(String source, CharFilterFactory[] charFilterFactories, TokenizerFactory tokenizerFactory, TokenFilterFactory[] tokenFilterFactories, int current) { - Reader reader = new FastStringReader(source); + Reader reader = new StringReader(source); for (CharFilterFactory charFilterFactory : charFilterFactories) { reader = charFilterFactory.create(reader); } diff --git a/server/src/main/java/org/elasticsearch/common/Strings.java b/server/src/main/java/org/elasticsearch/common/Strings.java index 02a0852b0a03a..8c823f401a0f8 100644 --- a/server/src/main/java/org/elasticsearch/common/Strings.java +++ b/server/src/main/java/org/elasticsearch/common/Strings.java @@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.FastStringReader; import 
org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -31,6 +30,7 @@ import java.io.BufferedReader; import java.io.IOException; +import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -51,7 +51,7 @@ public class Strings { public static final String[] EMPTY_ARRAY = new String[0]; public static void spaceify(int spaces, String from, StringBuilder to) throws Exception { - try (BufferedReader reader = new BufferedReader(new FastStringReader(from))) { + try (BufferedReader reader = new BufferedReader(new StringReader(from))) { String line; while ((line = reader.readLine()) != null) { for (int i = 0; i < spaces; i++) { diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java index 38643df017943..2a8110c5f4dc2 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.GeoShapeType; +import java.io.StringReader; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; @@ -32,7 +33,6 @@ import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentParser; @@ -69,7 +69,7 @@ public static ShapeBuilder parse(XContentParser parser) /** throws an exception if the parsed geometry type does 
not match the expected shape type */ public static ShapeBuilder parseExpectedType(XContentParser parser, final GeoShapeType shapeType) throws IOException, ElasticsearchParseException { - FastStringReader reader = new FastStringReader(parser.text()); + StringReader reader = new StringReader(parser.text()); try { // setup the tokenizer; configured to read words w/o numbers StreamTokenizer tokenizer = new StreamTokenizer(reader); diff --git a/server/src/main/java/org/elasticsearch/common/io/FastStringReader.java b/server/src/main/java/org/elasticsearch/common/io/FastStringReader.java deleted file mode 100644 index 2ac7e9022e687..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/io/FastStringReader.java +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.io; - -import java.io.IOException; -import java.io.Reader; - -/** - * A character stream whose source is a string that is not thread safe - *

- * (shay.banon - * ) - */ -public class FastStringReader extends Reader implements CharSequence { - - private String str; - private int length; - private int next = 0; - private int mark = 0; - private boolean closed = false; - - /** - * Creates a new string reader. - * - * @param s String providing the character stream. - */ - public FastStringReader(String s) { - this.str = s; - this.length = s.length(); - } - - /** - * Check to make sure that the stream has not been closed - */ - private void ensureOpen() throws IOException { - if (closed) { - throw new IOException("Stream closed"); - } - } - - @Override - public int length() { - return length; - } - - @Override - public char charAt(int index) { - return str.charAt(index); - } - - @Override - public CharSequence subSequence(int start, int end) { - return str.subSequence(start, end); - } - - /** - * Reads a single character. - * - * @return The character read, or -1 if the end of the stream has been - * reached - * @throws IOException If an I/O error occurs - */ - @Override - public int read() throws IOException { - ensureOpen(); - if (next >= length) - return -1; - return str.charAt(next++); - } - - /** - * Reads characters into a portion of an array. - * - * @param cbuf Destination buffer - * @param off Offset at which to start writing characters - * @param len Maximum number of characters to read - * @return The number of characters read, or -1 if the end of the - * stream has been reached - * @throws IOException If an I/O error occurs - */ - @Override - public int read(char cbuf[], int off, int len) throws IOException { - ensureOpen(); - if (len == 0) { - return 0; - } - if (next >= length) - return -1; - int n = Math.min(length - next, len); - str.getChars(next, next + n, cbuf, off); - next += n; - return n; - } - - /** - * Skips the specified number of characters in the stream. Returns - * the number of characters that were skipped. - *

- * The ns parameter may be negative, even though the - * skip method of the {@link Reader} superclass throws - * an exception in this case. Negative values of ns cause the - * stream to skip backwards. Negative return values indicate a skip - * backwards. It is not possible to skip backwards past the beginning of - * the string. - *

- * If the entire string has been read or skipped, then this method has - * no effect and always returns 0. - * - * @throws IOException If an I/O error occurs - */ - @Override - public long skip(long ns) throws IOException { - ensureOpen(); - if (next >= length) - return 0; - // Bound skip by beginning and end of the source - long n = Math.min(length - next, ns); - n = Math.max(-next, n); - next += n; - return n; - } - - /** - * Tells whether this stream is ready to be read. - * - * @return True if the next read() is guaranteed not to block for input - * @throws IOException If the stream is closed - */ - @Override - public boolean ready() throws IOException { - ensureOpen(); - return true; - } - - /** - * Tells whether this stream supports the mark() operation, which it does. - */ - @Override - public boolean markSupported() { - return true; - } - - /** - * Marks the present position in the stream. Subsequent calls to reset() - * will reposition the stream to this point. - * - * @param readAheadLimit Limit on the number of characters that may be - * read while still preserving the mark. Because - * the stream's input comes from a string, there - * is no actual limit, so this argument must not - * be negative, but is otherwise ignored. - * @throws IllegalArgumentException If readAheadLimit is < 0 - * @throws IOException If an I/O error occurs - */ - @Override - public void mark(int readAheadLimit) throws IOException { - if (readAheadLimit < 0) { - throw new IllegalArgumentException("Read-ahead limit < 0"); - } - ensureOpen(); - mark = next; - } - - /** - * Resets the stream to the most recent mark, or to the beginning of the - * string if it has never been marked. - * - * @throws IOException If an I/O error occurs - */ - @Override - public void reset() throws IOException { - ensureOpen(); - next = mark; - } - - /** - * Closes the stream and releases any system resources associated with - * it. 
Once the stream has been closed, further read(), - * ready(), mark(), or reset() invocations will throw an IOException. - * Closing a previously closed stream has no effect. - */ - @Override - public void close() { - closed = true; - } - - @Override - public String toString() { - return str; - } -} diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java index 28971fc9ca45e..f79f45f3b62bd 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java @@ -35,10 +35,10 @@ import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; +import java.io.StringReader; import java.util.Arrays; import java.util.HashSet; import java.util.List; @@ -166,7 +166,7 @@ private Query createQuery(XMoreLikeThis mlt) throws IOException { if (this.likeText != null) { Reader[] readers = new Reader[likeText.length]; for (int i = 0; i < readers.length; i++) { - readers[i] = new FastStringReader(likeText[i]); + readers[i] = new StringReader(likeText[i]); } //LUCENE 4 UPGRADE this mapps the 3.6 behavior (only use the first field) Query mltQuery = mlt.like(moreLikeFields[0], readers); diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index e973689615ed7..5d1e4537f6561 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -58,10 +58,10 @@ import org.apache.lucene.util.CharsRefBuilder; import 
org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.io.FastStringReader; import java.io.IOException; import java.io.Reader; +import java.io.StringReader; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -815,7 +815,7 @@ private PriorityQueue retrieveTerms(int docNum) throws IOException { for (IndexableField field : fields) { final String stringValue = field.stringValue(); if (stringValue != null) { - addTermFrequencies(new FastStringReader(stringValue), termFreqMap, fieldName); + addTermFrequencies(new StringReader(stringValue), termFreqMap, fieldName); } } } else { diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java index 222cf8e98bd32..58a9e9a98f833 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -37,6 +36,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; +import java.io.StringReader; import java.util.Set; /** @@ -81,7 +81,7 @@ public XContentGenerator createGenerator(OutputStream os, Set includes, @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, String content) throws IOException { - return new CborXContentParser(xContentRegistry, deprecationHandler, cborFactory.createParser(new FastStringReader(content))); + return 
new CborXContentParser(xContentRegistry, deprecationHandler, cborFactory.createParser(new StringReader(content))); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java index 813aa64d9ffa3..b2aac37abe57d 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -36,6 +35,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; +import java.io.StringReader; import java.util.Set; /** @@ -82,7 +82,7 @@ public XContentGenerator createGenerator(OutputStream os, Set includes, @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, String content) throws IOException { - return new JsonXContentParser(xContentRegistry, deprecationHandler, jsonFactory.createParser(new FastStringReader(content))); + return new JsonXContentParser(xContentRegistry, deprecationHandler, jsonFactory.createParser(new StringReader(content))); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java index bbe3542f29c5a..caf6488eea398 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java @@ -24,7 +24,6 @@ import 
com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -37,6 +36,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; +import java.io.StringReader; import java.util.Set; /** @@ -82,7 +82,7 @@ public XContentGenerator createGenerator(OutputStream os, Set includes, @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, String content) throws IOException { - return new SmileXContentParser(xContentRegistry, deprecationHandler, smileFactory.createParser(new FastStringReader(content))); + return new SmileXContentParser(xContentRegistry, deprecationHandler, smileFactory.createParser(new StringReader(content))); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index 3c466e59093be..5c335276bc024 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -35,6 +34,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; +import java.io.StringReader; import java.util.Set; /** 
@@ -76,7 +76,7 @@ public XContentGenerator createGenerator(OutputStream os, Set includes, @Override public XContentParser createParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, String content) throws IOException { - return new YamlXContentParser(xContentRegistry, deprecationHandler, yamlFactory.createParser(new FastStringReader(content))); + return new YamlXContentParser(xContentRegistry, deprecationHandler, yamlFactory.createParser(new StringReader(content))); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java index 2da3d8bc07a44..090f82c9b1541 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java @@ -25,7 +25,6 @@ import org.apache.lucene.analysis.synonym.SynonymGraphFilter; import org.apache.lucene.analysis.synonym.SynonymMap; import org.apache.lucene.analysis.synonym.WordnetSynonymParser; -import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index f90b2e4df4b5c..e29c946faaf4a 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -28,7 +28,6 @@ import org.apache.lucene.analysis.synonym.SynonymMap; import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.elasticsearch.Version; -import org.elasticsearch.common.io.FastStringReader; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -36,6 +35,7 @@ import java.io.IOException; import java.io.Reader; +import java.io.StringReader; import java.util.List; public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { @@ -97,7 +97,7 @@ protected Reader getRulesFromSettings(Environment env) { for (String line : rulesList) { sb.append(line).append(System.lineSeparator()); } - rulesReader = new FastStringReader(sb.toString()); + rulesReader = new StringReader(sb.toString()); } else if (settings.get("synonyms_path") != null) { rulesReader = Analysis.getReaderFromFile(env, settings, "synonyms_path"); } else { From e690912f5dbf82a2fb9171e8ac1dc668d1fd28b6 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 8 Mar 2018 19:46:54 -0800 Subject: [PATCH 09/89] Build: Remove rest tests on archive distribution projects (#28952) This commit removes running rest tests on the full zip and tar distributions in favor of doing a simple extraction check like is done for rpm and deb files. The rest tests are still run on the integ test zip, at least for now (this should eventually be moved out to a different location). 
--- distribution/archives/build.gradle | 31 +++++++++++++--- .../test/rest/TarClientYamlTestSuiteIT.java | 37 ------------------- .../test/smoke_test_plugins/10_modules.yml | 13 ------- .../test/rest/ZipClientYamlTestSuiteIT.java | 37 ------------------- .../test/smoke_test_plugins/10_modules.yml | 13 ------- 5 files changed, 26 insertions(+), 105 deletions(-) delete mode 100644 distribution/archives/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java delete mode 100644 distribution/archives/tar/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml delete mode 100644 distribution/archives/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java delete mode 100644 distribution/archives/zip/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 93960a3ac21b2..bb59bc84f5385 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -21,6 +21,7 @@ import org.apache.tools.ant.taskdefs.condition.Os import org.apache.tools.ant.filters.FixCrLfFilter import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.EmptyDirTask +import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.plugin.PluginBuildPlugin @@ -125,19 +126,39 @@ subprojects { artifacts { 'default' buildDist } + + // sanity checks if a archives can be extracted + File extractionDir = new File(buildDir, 'extracted') + task testExtraction(type: LoggedExec) { + dependsOn buildDist + doFirst { + project.delete(extractionDir) + extractionDir.mkdirs() + } + } + if (project.name.contains('zip')) { + testExtraction { + onlyIf { new File('/bin/unzip').exists() || new File('/usr/bin/unzip').exists() || new File('/usr/local/bin/unzip').exists() } + commandLine 'unzip', "${-> buildDist.outputs.files.singleFile}", '-d', extractionDir + } + } 
else { // tar + testExtraction { + onlyIf { new File('/bin/tar').exists() || new File('/usr/bin/tar').exists() || new File('/usr/local/bin/tar').exists() } + commandLine 'tar', '-xvzf', "${-> buildDist.outputs.files.singleFile}", '-C', extractionDir + } + } + check.dependsOn testExtraction } /***************************************************************************** * Rest test config * *****************************************************************************/ -subprojects { +configure(subprojects.findAll { it.name == 'integ-test-zip' }) { apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' - if (project.name == 'integ-test-zip') { - integTest { - includePackaged true - } + integTest { + includePackaged true } integTestCluster { diff --git a/distribution/archives/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java b/distribution/archives/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java deleted file mode 100644 index 391d6fe688fd5..0000000000000 --- a/distribution/archives/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class TarClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public TarClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return createParameters(); - } -} diff --git a/distribution/archives/tar/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml b/distribution/archives/tar/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml deleted file mode 100644 index da68232f8d8fb..0000000000000 --- a/distribution/archives/tar/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml +++ /dev/null @@ -1,13 +0,0 @@ -# Integration tests for distributions with modules -# -"Correct Modules Count": - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - - length: { nodes.$master.modules: ${expected.modules.count} } diff --git a/distribution/archives/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java b/distribution/archives/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java deleted file mode 100644 index dc08af2528b52..0000000000000 --- a/distribution/archives/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ -public class ZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - public ZipClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return createParameters(); - } -} diff --git a/distribution/archives/zip/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml b/distribution/archives/zip/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml deleted file mode 100644 index da68232f8d8fb..0000000000000 --- a/distribution/archives/zip/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_modules.yml +++ /dev/null @@ -1,13 +0,0 @@ -# Integration tests for distributions with modules -# -"Correct Modules Count": - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - - length: { nodes.$master.modules: ${expected.modules.count} } From 56e0cc8bce57408ace9e4ca3b4f976b80b642715 Mon Sep 17 00:00:00 2001 From: Martijn van 
Groningen Date: Wed, 28 Feb 2018 10:06:47 -0800 Subject: [PATCH 10/89] percolator: Take `matchAllDocs` and `verified` of the sub result into account when analyzing a function_score query. Before the `matchAllDocs` was ignored and this could lead to percolator queries not matching when the inner query was a match_all query and min_score was specified. Before when `verified` was not taken into account if the function_score query wrapped an unverified query this could lead to matching percolator queries that shouldn't match at all. --- .../percolator/QueryAnalyzer.java | 9 ++++- .../percolator/CandidateQueryTests.java | 37 +++++++++++++++++++ .../percolator/QueryAnalyzerTests.java | 18 +++++++++ 3 files changed, 62 insertions(+), 2 deletions(-) diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 1f0b2c0a715cc..d2d18dd3f5f2b 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -464,12 +464,17 @@ private static BiFunction functionScoreQuery() { return (query, version) -> { FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) query; Result result = analyze(functionScoreQuery.getSubQuery(), version); + // If min_score is specified we can't guarantee upfront that this percolator query matches, // so in that case we set verified to false. // (if it matches with the percolator document matches with the extracted terms. // Min score filters out docs, which is different than the functions, which just influences the score.) 
- boolean verified = functionScoreQuery.getMinScore() == null; - return new Result(verified, result.extractions, result.minimumShouldMatch); + boolean verified = result.verified && functionScoreQuery.getMinScore() == null; + if (result.matchAllDocs) { + return new Result(result.matchAllDocs, verified); + } else { + return new Result(verified, result.extractions, result.minimumShouldMatch); + } }; } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 9bf03dd1daec2..762c753ba392e 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -55,6 +55,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; @@ -77,6 +78,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -220,6 +222,16 @@ public void testDuel() throws Exception { } return new DisjunctionMaxQuery(clauses, 0.01f); }); + queryFunctions.add(() -> { + Float minScore = randomBoolean() ? 
null : (float) randomIntBetween(1, 1000); + Query innerQuery; + if (randomBoolean()) { + innerQuery = new TermQuery(new Term(field1, randomFrom(stringContent.get(field1)))); + } else { + innerQuery = new PhraseQuery(field1, randomFrom(stringContent.get(field1)), randomFrom(stringContent.get(field1))); + } + return new FunctionScoreQuery(innerQuery, minScore, 1f); + }); List documents = new ArrayList<>(); for (Supplier queryFunction : queryFunctions) { @@ -679,6 +691,31 @@ public void testPercolateMatchAll() throws Exception { assertEquals(4, topDocs.scoreDocs[2].doc); } + public void testFunctionScoreQuery() throws Exception { + List docs = new ArrayList<>(); + addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), null, 1f), docs); + addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), 10f, 1f), docs); + addQuery(new FunctionScoreQuery(new MatchAllDocsQuery(), null, 1f), docs); + addQuery(new FunctionScoreQuery(new MatchAllDocsQuery(), 10F, 1f), docs); + + indexWriter.addDocuments(docs); + indexWriter.close(); + directoryReader = DirectoryReader.open(directory); + IndexSearcher shardSearcher = newSearcher(directoryReader); + shardSearcher.setQueryCache(null); + + MemoryIndex memoryIndex = new MemoryIndex(); + memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); + IndexSearcher percolateSearcher = memoryIndex.createSearcher(); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, + Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT); + TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); + assertEquals(2L, topDocs.totalHits); + assertEquals(2, topDocs.scoreDocs.length); + assertEquals(0, topDocs.scoreDocs[0].doc); + assertEquals(2, topDocs.scoreDocs[1].doc); + } + public void testPercolateSmallAndLargeDocument() throws Exception { List docs = new ArrayList<>(); BooleanQuery.Builder builder = new 
BooleanQuery.Builder(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index 5b382eb7654bd..5968f8c3f8327 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -811,6 +811,24 @@ public void testFunctionScoreQuery() { assertTermsEqual(result.extractions, new Term("_field", "_value")); } + public void testFunctionScoreQuery_withMatchAll() { + MatchAllDocsQuery innerQuery = new MatchAllDocsQuery(); + FunctionScoreQuery functionScoreQuery1 = new FunctionScoreQuery(innerQuery, new RandomScoreFunction(0, 0, null)); + Result result = analyze(functionScoreQuery1, Version.CURRENT); + assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(0)); + assertThat(result.matchAllDocs, is(true)); + assertThat(result.extractions.isEmpty(), is(true)); + + FunctionScoreQuery functionScoreQuery2 = + new FunctionScoreQuery(innerQuery, new RandomScoreFunction(0, 0, null), CombineFunction.MULTIPLY, 1f, 10f); + result = analyze(functionScoreQuery2, Version.CURRENT); + assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(0)); + assertThat(result.matchAllDocs, is(true)); + assertThat(result.extractions.isEmpty(), is(true)); + } + public void testSelectBestExtraction() { Set queryTerms1 = terms(new int[0], "12", "1234", "12345"); Set queryTerms2 = terms(new int[0], "123", "1234", "12345"); From 2e01b25dd2e14a96cea9d2fe140f06c0bd57e767 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 8 Mar 2018 22:49:27 -0800 Subject: [PATCH 11/89] Plugins: Consolidate plugin and module loading code (#28815) At one point, modules and plugins were very different. But effectively now they are the same, just from different directories. 
This commit unifies the loading methods so they are simply two different directories. Note that the main codepath to load plugin bundles had duplication (was not calling getPluginBundles) since previous refactorings to add meta plugins. Note this change also rewords the primary exception message when a plugin descriptor is missing, as the wording asking if the plugin was built before 2.0 isn't really applicable anymore (it is highly unlikely someone tries to install a 1.x plugin on any modern version). --- .../elasticsearch/plugins/PluginsService.java | 62 +++++++------------ .../plugins/PluginsServiceTests.java | 13 ++-- 2 files changed, 28 insertions(+), 47 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 3ed4374ca2ac3..766d171752c16 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -140,7 +140,8 @@ public PluginsService(Settings settings, Path configPath, Path modulesDirectory, // TODO: remove this leniency, but tests bogusly rely on it if (isAccessibleDirectory(pluginsDirectory, logger)) { checkForFailedPluginRemovals(pluginsDirectory); - List plugins = getPluginBundleCollections(pluginsDirectory); + // call findBundles directly to get the meta plugin names + List plugins = findBundles(pluginsDirectory, "plugin"); for (final BundleCollection plugin : plugins) { final Collection bundles = plugin.bundles(); for (final Bundle bundle : bundles) { @@ -173,8 +174,9 @@ public PluginsService(Settings settings, Path configPath, Path modulesDirectory, if (!missingPlugins.isEmpty()) { final String message = String.format( Locale.ROOT, - "missing mandatory plugins [%s]", - Strings.collectionToDelimitedString(missingPlugins, ", ")); + "missing mandatory plugins [%s], found plugins [%s]", + Strings.collectionToDelimitedString(missingPlugins, ", "), + 
Strings.collectionToDelimitedString(pluginsNames, ", ")); throw new IllegalStateException(message); } } @@ -400,25 +402,6 @@ static void verifyCompatibility(PluginInfo info) { JarHell.checkJavaVersion(info.getName(), info.getJavaVersion()); } - // similar in impl to getPluginBundles, but DO NOT try to make them share code. - // we don't need to inherit all the leniency, and things are different enough. - static Set getModuleBundles(Path modulesDirectory) throws IOException { - // damn leniency - if (Files.notExists(modulesDirectory)) { - return Collections.emptySet(); - } - Set bundles = new LinkedHashSet<>(); - try (DirectoryStream stream = Files.newDirectoryStream(modulesDirectory)) { - for (Path module : stream) { - PluginInfo info = PluginInfo.readFromProperties(module); - if (bundles.add(new Bundle(info, module)) == false) { - throw new IllegalStateException("duplicate module: " + info); - } - } - } - return bundles; - } - static void checkForFailedPluginRemovals(final Path pluginsDirectory) throws IOException { /* * Check for the existence of a marker file that indicates any plugins are in a garbage state from a failed attempt to remove the @@ -440,29 +423,29 @@ static void checkForFailedPluginRemovals(final Path pluginsDirectory) throws IOE } } - /** - * Get the plugin bundles from the specified directory. - * - * @param pluginsDirectory the directory - * @return the set of plugin bundles in the specified directory - * @throws IOException if an I/O exception occurs reading the plugin bundles - */ + /** Get bundles for plugins installed in the given modules directory. */ + static Set getModuleBundles(Path modulesDirectory) throws IOException { + return findBundles(modulesDirectory, "module").stream().flatMap(b -> b.bundles().stream()).collect(Collectors.toSet()); + } + + /** Get bundles for plugins installed in the given plugins directory. 
*/ static Set getPluginBundles(final Path pluginsDirectory) throws IOException { - return getPluginBundleCollections(pluginsDirectory).stream().flatMap(b -> b.bundles().stream()).collect(Collectors.toSet()); + return findBundles(pluginsDirectory, "plugin").stream().flatMap(b -> b.bundles().stream()).collect(Collectors.toSet()); } - private static List getPluginBundleCollections(final Path pluginsDirectory) throws IOException { + // searches subdirectories under the given directory for plugin directories + private static List findBundles(final Path directory, String type) throws IOException { final List bundles = new ArrayList<>(); final Set seenBundles = new HashSet<>(); - final Tuple, Map>> groupedPluginDirs = findGroupedPluginDirs(pluginsDirectory); + final Tuple, Map>> groupedPluginDirs = findGroupedPluginDirs(directory); for (final Path plugin : groupedPluginDirs.v1()) { - final Bundle bundle = bundle(seenBundles, plugin); + final Bundle bundle = readPluginBundle(seenBundles, plugin, type); bundles.add(bundle); } for (final Map.Entry> metaPlugin : groupedPluginDirs.v2().entrySet()) { final List metaPluginBundles = new ArrayList<>(); for (final Path metaPluginPlugin : metaPlugin.getValue()) { - final Bundle bundle = bundle(seenBundles, metaPluginPlugin); + final Bundle bundle = readPluginBundle(seenBundles, metaPluginPlugin, type); metaPluginBundles.add(bundle); } final MetaBundle metaBundle = new MetaBundle(metaPlugin.getKey(), metaPluginBundles); @@ -472,18 +455,19 @@ private static List getPluginBundleCollections(final Path plug return bundles; } - private static Bundle bundle(final Set bundles, final Path plugin) throws IOException { - Loggers.getLogger(PluginsService.class).trace("--- adding plugin [{}]", plugin.toAbsolutePath()); + // get a bundle for a single plugin dir + private static Bundle readPluginBundle(final Set bundles, final Path plugin, String type) throws IOException { + Loggers.getLogger(PluginsService.class).trace("--- adding [{}] [{}]", 
type, plugin.toAbsolutePath()); final PluginInfo info; try { info = PluginInfo.readFromProperties(plugin); } catch (final IOException e) { - throw new IllegalStateException("Could not load plugin descriptor for existing plugin [" - + plugin.getFileName() + "]. Was the plugin built before 2.0?", e); + throw new IllegalStateException("Could not load plugin descriptor for " + type + + " directory [" + plugin.getFileName() + "]", e); } final Bundle bundle = new Bundle(info, plugin); if (bundles.add(bundle) == false) { - throw new IllegalStateException("duplicate plugin: " + info); + throw new IllegalStateException("duplicate " + type + ": " + info); } return bundle; } diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 36e1266c51118..4f0a73ca44ca6 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -107,12 +107,9 @@ public void testAdditionalSettingsClash() { public void testExistingPluginMissingDescriptor() throws Exception { Path pluginsDir = createTempDir(); Files.createDirectory(pluginsDir.resolve("plugin-missing-descriptor")); - try { - PluginsService.getPluginBundles(pluginsDir); - fail(); - } catch (IllegalStateException e) { - assertTrue(e.getMessage(), e.getMessage().contains("Could not load plugin descriptor for existing plugin [plugin-missing-descriptor]")); - } + IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.getPluginBundles(pluginsDir)); + assertThat(e.getMessage(), + containsString("Could not load plugin descriptor for plugin directory [plugin-missing-descriptor]")); } public void testFilterPlugins() { @@ -139,7 +136,7 @@ public void testHiddenFiles() throws IOException { IllegalStateException.class, () -> newPluginsService(settings)); - final String expected = "Could not load plugin 
descriptor for existing plugin [.hidden]"; + final String expected = "Could not load plugin descriptor for plugin directory [.hidden]"; assertThat(e, hasToString(containsString(expected))); } @@ -158,7 +155,7 @@ public void testDesktopServicesStoreFiles() throws IOException { assertNotNull(pluginsService); } else { final IllegalStateException e = expectThrows(IllegalStateException.class, () -> newPluginsService(settings)); - assertThat(e, hasToString(containsString("Could not load plugin descriptor for existing plugin [.DS_Store]"))); + assertThat(e.getMessage(), containsString("Could not load plugin descriptor for plugin directory [.DS_Store]")); assertNotNull(e.getCause()); assertThat(e.getCause(), instanceOf(FileSystemException.class)); if (Constants.WINDOWS) { From 60defeccf81cccf8caa0441acf9867930f3b033a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 9 Mar 2018 09:37:54 +0100 Subject: [PATCH 12/89] Fixed incorrect test try-catch statement --- .../java/org/elasticsearch/ingest/IngestClientIT.java | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 809a81b687e80..758a6d9a02706 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -51,6 +51,7 @@ import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; @@ -268,13 +269,9 @@ public void testPutWithPipelineFactoryError() throws Exception { .endArray() .endObject().bytes(); PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); - try { - 
client().admin().cluster().putPipeline(putPipelineRequest).get(); - } catch (ExecutionException e) { - ElasticsearchParseException ex = (ElasticsearchParseException) ExceptionsHelper.unwrap(e, ElasticsearchParseException.class); - assertNotNull(ex); - assertThat(ex.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]")); - } + Exception e = expectThrows(ElasticsearchParseException.class, + () -> client().admin().cluster().putPipeline(putPipelineRequest).actionGet()); + assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]")); GetPipelineResponse response = client().admin().cluster().prepareGetPipeline("_id").get(); assertFalse(response.isFound()); From e0da114fc9c58b956cd373d041ec75309ad139c8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 9 Mar 2018 09:42:44 +0000 Subject: [PATCH 13/89] Use String.join() to describe a list of tasks (#28941) This change replaces the use of string concatenation with a call to String.join(). String concatenation might be quadratic, unless the compiler can optimise it away, whereas String.join() is more reliably linear. There can sometimes be a large number of pending ClusterState update tasks and #28920 includes a report that this operation sometimes takes a long time. 
--- .../cluster/ClusterStateTaskExecutor.java | 11 ++--------- .../cluster/metadata/MetaDataMappingService.java | 2 +- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index 8e50fddb9b17e..e30f02ad4060d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -23,6 +23,7 @@ import java.util.IdentityHashMap; import java.util.List; import java.util.Map; +import java.util.stream.Stream; public interface ClusterStateTaskExecutor { /** @@ -55,15 +56,7 @@ default void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { * This allows groupd task description but the submitting source. */ default String describeTasks(List tasks) { - return tasks.stream().map(T::toString).reduce((s1,s2) -> { - if (s1.isEmpty()) { - return s2; - } else if (s2.isEmpty()) { - return s1; - } else { - return s1 + ", " + s2; - } - }).orElse(""); + return String.join(", ", tasks.stream().map(t -> (CharSequence)t.toString()).filter(t -> t.length() == 0)::iterator); } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 12a56f00bd4f0..fbe941f5021fb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -352,7 +352,7 @@ private ClusterState applyRequest(ClusterState currentState, PutMappingClusterSt @Override public String describeTasks(List tasks) { - return tasks.stream().map(PutMappingClusterStateUpdateRequest::type).reduce((s1, s2) -> s1 + ", " + s2).orElse(""); + return String.join(", ", tasks.stream().map(t -> 
(CharSequence)t.type())::iterator); } } From 12296990f3996f0d2b2ed6e3460fdca128c67a4c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 9 Mar 2018 14:28:45 +0100 Subject: [PATCH 14/89] Use different pipeline id in test. (pipelines do not get removed between tests extending from ESIntegTestCase) --- .../test/java/org/elasticsearch/ingest/IngestClientIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 758a6d9a02706..d27b05d1e7b29 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -268,12 +268,12 @@ public void testPutWithPipelineFactoryError() throws Exception { .endObject() .endArray() .endObject().bytes(); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); + PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id2", source, XContentType.JSON); Exception e = expectThrows(ElasticsearchParseException.class, () -> client().admin().cluster().putPipeline(putPipelineRequest).actionGet()); assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]")); - GetPipelineResponse response = client().admin().cluster().prepareGetPipeline("_id").get(); + GetPipelineResponse response = client().admin().cluster().prepareGetPipeline("_id2").get(); assertFalse(response.isFound()); } } From 240aa34cb4fab36229f308d3fe7dcdb641a30662 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Thu, 22 Feb 2018 15:40:20 +0100 Subject: [PATCH 15/89] Use client settings in repository-gcs (#28575) Similarly to what has been done for s3 and azure, this commit removes the repository settings `application_name` and `connect/read_timeout` in favor of client settings. 
It introduces a GoogleCloudStorageClientSettings class (similar to S3ClientSettings) and a bunch of unit tests for it, aligns the documentation to be more coherent with the S3 one, documents the connect/read timeouts that were not documented at all, and also adds a new client setting that allows defining a custom endpoint.
-[[repository-gcs-bucket-permission]] -===== Set Bucket Permission +[[repository-gcs-client]] +==== Client Settings -The service account used to access the bucket must have the "Writer" access to the bucket: +The client used to connect to Google Cloud Storage has a number of settings available. +Client setting names are of the form `gcs.client.CLIENT_NAME.SETTING_NAME` and specified +inside `elasticsearch.yml`. The default client name looked up by a `gcs` repository is +called `default`, but can be customized with the repository setting `client`. -1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console] -2. Select your project -3. Got to the https://console.cloud.google.com/storage/browser[Storage Browser] -4. Select the bucket and "Edit bucket permission" -5. The service account must be configured as a "User" with "Writer" access +For example: + +[source,js] +---- +PUT _snapshot/my_gcs_repository +{ + "type": "gcs", + "settings": { + "bucket": "my_bucket", + "client": "my_alternate_client" + } +} +---- +// CONSOLE +// TEST[skip:we don't have gcs setup while testing this] + +Some settings are sensitive and must be stored in the +{ref}/secure-settings.html[elasticsearch keystore]. This is the case for the service account file: + +[source,sh] +---- +bin/elasticsearch-keystore add-file gcs.client.default.credentials_file +---- + +The following are the available client settings. Those that must be stored in the keystore +are marked as `Secure`. +`credentials_file`:: + + The service account file that is used to authenticate to the Google Cloud Storage service. (Secure) + +`endpoint`:: + + The Google Cloud Storage service endpoint to connect to. This will be automatically + determined by the Google Cloud Storage client but can be specified explicitly. + +`connect_timeout`:: + + The timeout to establish a connection to the Google Cloud Storage service. The value should + specify the unit. For example, a value of `5s` specifies a 5 second timeout. 
The value of `-1` + corresponds to an infinite timeout. The default value is 20 seconds. + +`read_timeout`:: + + The timeout to read data from an established connection. The value should + specify the unit. For example, a value of `5s` specifies a 5 second timeout. The value of `-1` + corresponds to an infinite timeout. The default value is 20 seconds. + +`application_name`:: + + Name used by the client when it uses the Google Cloud Storage service. Setting + a custom name can be useful to authenticate your cluster when requests + statistics are logged in the Google Cloud Platform. Default to `repository-gcs` [[repository-gcs-repository]] -==== Create a Repository +==== Repository Settings + +The `gcs` repository type supports a number of settings to customize how data +is stored in Google Cloud Storage. -Once everything is installed and every node is started, you can create a new repository that -uses Google Cloud Storage to store snapshots: +These can be specified when creating the repository. For example: [source,js] ---- @@ -140,12 +192,13 @@ PUT _snapshot/my_gcs_repository { "type": "gcs", "settings": { - "bucket": "my_bucket" + "bucket": "my_other_bucket", + "base_path": "dev" } } ---- // CONSOLE -// TEST[skip:we don't have gcs setup while testing this] +// TEST[skip:we don't have gcs set up while testing this] The following settings are supported: @@ -155,8 +208,8 @@ The following settings are supported: `client`:: - The client congfiguration to use. This controls which credentials are used to connect - to Compute Engine. + The name of the client to use to connect to Google Cloud Storage. + Defaults to `default`. `base_path`:: @@ -177,6 +230,16 @@ The following settings are supported: `application_name`:: - Name used by the plugin when it uses the Google Cloud JSON API. Setting - a custom name can be useful to authenticate your cluster when requests - statistics are logged in the Google Cloud Platform. 
Default to `repository-gcs` + deprecated[6.3.0, This setting is now defined in the <>] + Name used by the client when it uses the Google Cloud Storage service. + +[[repository-gcs-bucket-permission]] +===== Recommended Bucket Permission + +The service account used to access the bucket must have the "Writer" access to the bucket: + +1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console] +2. Select your project +3. Got to the https://console.cloud.google.com/storage/browser[Storage Browser] +4. Select the bucket and "Edit bucket permission" +5. The service account must be configured as a "User" with "Writer" access diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index eb0828e96c62c..bff64ebdc9186 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -36,7 +36,7 @@ PUT _snapshot/my_s3_repository The client used to connect to S3 has a number of settings available. Client setting names are of the form `s3.client.CLIENT_NAME.SETTING_NAME` and specified inside `elasticsearch.yml`. The -default client name looked up by an s3 repository is called `default`, but can be customized +default client name looked up by a `s3` repository is called `default`, but can be customized with the repository setting `client`. 
For example: [source,js] diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index cc3dac22f6c36..b60dff09974b0 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -24,3 +24,5 @@ include::migrate_6_0.asciidoc[] include::migrate_6_1.asciidoc[] include::migrate_6_2.asciidoc[] + +include::migrate_6_3.asciidoc[] diff --git a/docs/reference/migration/migrate_6_3.asciidoc b/docs/reference/migration/migrate_6_3.asciidoc new file mode 100644 index 0000000000000..653c99d2a338f --- /dev/null +++ b/docs/reference/migration/migrate_6_3.asciidoc @@ -0,0 +1,12 @@ +[[breaking-changes-6.3]] +== Breaking changes in 6.3 + +[[breaking_63_plugins_changes]] +=== Plugins changes + +==== GCS Repository plugin + +* The repository settings `application_name`, `connect_timeout` and `read_timeout` have been deprecated and +must now be specified in the client settings instead. + +See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings]. diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java new file mode 100644 index 0000000000000..68143b48ba374 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -0,0 +1,172 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.gcs; + +import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; +import com.google.api.services.storage.StorageScopes; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.common.settings.Setting.timeSetting; + +/** + * Container for Google Cloud Storage clients settings. + */ +public class GoogleCloudStorageClientSettings { + + private static final String PREFIX = "gcs.client."; + + /** A json Service Account file loaded from secure settings. */ + static final Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting(PREFIX, "credentials_file", + key -> SecureSetting.secureFile(key, null)); + + /** An override for the Storage endpoint to connect to. */ + static final Setting.AffixSetting ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint", + key -> new Setting<>(key, "", s -> s, Setting.Property.NodeScope)); + + /** + * The timeout to establish a connection. A value of {@code -1} corresponds to an infinite timeout. A value of {@code 0} + * corresponds to the default timeout of the Google Cloud Storage Java Library. 
+ */ + static final Setting.AffixSetting CONNECT_TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "connect_timeout", + key -> timeSetting(key, TimeValue.ZERO, TimeValue.MINUS_ONE, Setting.Property.NodeScope)); + + /** + * The timeout to read data from an established connection. A value of {@code -1} corresponds to an infinite timeout. A value of + * {@code 0} corresponds to the default timeout of the Google Cloud Storage Java Library. + */ + static final Setting.AffixSetting READ_TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "read_timeout", + key -> timeSetting(key, TimeValue.ZERO, TimeValue.MINUS_ONE, Setting.Property.NodeScope)); + + /** Name used by the client when it uses the Google Cloud JSON API. **/ + static final Setting.AffixSetting APPLICATION_NAME_SETTING = Setting.affixKeySetting(PREFIX, "application_name", + key -> new Setting<>(key, "repository-gcs", s -> s, Setting.Property.NodeScope)); + + /** The credentials used by the client to connect to the Storage endpoint **/ + private final GoogleCredential credential; + + /** The Storage root URL the client should talk to, or empty string to use the default. 
**/ + private final String endpoint; + + /** The timeout to establish a connection **/ + private final TimeValue connectTimeout; + + /** The timeout to read data from an established connection **/ + private final TimeValue readTimeout; + + /** The Storage client application name **/ + private final String applicationName; + + GoogleCloudStorageClientSettings(final GoogleCredential credential, + final String endpoint, + final TimeValue connectTimeout, + final TimeValue readTimeout, + final String applicationName) { + this.credential = credential; + this.endpoint = endpoint; + this.connectTimeout = connectTimeout; + this.readTimeout = readTimeout; + this.applicationName = applicationName; + } + + public GoogleCredential getCredential() { + return credential; + } + + public String getEndpoint() { + return endpoint; + } + + public TimeValue getConnectTimeout() { + return connectTimeout; + } + + public TimeValue getReadTimeout() { + return readTimeout; + } + + public String getApplicationName() { + return applicationName; + } + + public static Map load(final Settings settings) { + final Map clients = new HashMap<>(); + for (String clientName: settings.getGroups(PREFIX).keySet()) { + clients.put(clientName, getClientSettings(settings, clientName)); + } + if (clients.containsKey("default") == false) { + // this won't find any settings under the default client, + // but it will pull all the fallback static settings + clients.put("default", getClientSettings(settings, "default")); + } + return Collections.unmodifiableMap(clients); + } + + static GoogleCloudStorageClientSettings getClientSettings(final Settings settings, final String clientName) { + return new GoogleCloudStorageClientSettings( + loadCredential(settings, clientName), + getConfigValue(settings, clientName, ENDPOINT_SETTING), + getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), + getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), + getConfigValue(settings, clientName, 
APPLICATION_NAME_SETTING) + ); + } + + /** + * Loads the service account file corresponding to a given client name. If no file is defined for the client, + * a {@code null} credential is returned. + * + * @param settings the {@link Settings} + * @param clientName the client name + * + * @return the {@link GoogleCredential} to use for the given client, {@code null} if no service account is defined. + */ + static GoogleCredential loadCredential(final Settings settings, final String clientName) { + try { + if (CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).exists(settings) == false) { + // explicitly returning null here so that the default credential + // can be loaded later when creating the Storage client + return null; + } + try (InputStream credStream = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).get(settings)) { + GoogleCredential credential = GoogleCredential.fromStream(credStream); + if (credential.createScopedRequired()) { + credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); + } + return credential; + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private static T getConfigValue(final Settings settings, final String clientName, final Setting.AffixSetting clientSetting) { + Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + return concreteSetting.get(settings); + } +} diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index fa43ab1bc3fcc..ef24cd959e55b 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -19,15 +19,8 @@ package org.elasticsearch.repositories.gcs; 
-import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Collections; -import java.util.List; -import java.util.Map; - import com.google.api.client.auth.oauth2.TokenRequest; import com.google.api.client.auth.oauth2.TokenResponse; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpHeaders; @@ -48,12 +41,15 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; -import org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository; -import org.elasticsearch.repositories.gcs.GoogleCloudStorageService; -public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; - public static final String NAME = "repository-gcs"; +public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { static { /* @@ -112,15 +108,19 @@ public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin }); } - private final Map credentials; + private final Map clientsSettings; + + public GoogleCloudStoragePlugin(final Settings settings) { + clientsSettings = GoogleCloudStorageClientSettings.load(settings); + } - public GoogleCloudStoragePlugin(Settings settings) { - credentials = GoogleCloudStorageService.loadClientCredentials(settings); + protected Map getClientsSettings() { + return clientsSettings; } // overridable for tests protected GoogleCloudStorageService createStorageService(Environment environment) { - return new GoogleCloudStorageService.InternalGoogleCloudStorageService(environment, credentials); + return new GoogleCloudStorageService(environment, 
clientsSettings); } @Override @@ -131,6 +131,11 @@ public Map getRepositories(Environment env, NamedXCo @Override public List> getSettings() { - return Collections.singletonList(GoogleCloudStorageService.CREDENTIALS_FILE_SETTING); + return Arrays.asList( + GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING, + GoogleCloudStorageClientSettings.ENDPOINT_SETTING, + GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING, + GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING, + GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index d435f001448c7..1c1fabcdb9f26 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -20,10 +20,13 @@ package org.elasticsearch.repositories.gcs; import com.google.api.services.storage.Storage; +import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -44,6 +47,9 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { + private final Logger logger = ESLoggerFactory.getLogger(GoogleCloudStorageRepository.class); + private final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + // package private for testing static final ByteSizeValue 
MIN_CHUNK_SIZE = new ByteSizeValue(1, ByteSizeUnit.BYTES); static final ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(100, ByteSizeUnit.MB); @@ -60,11 +66,17 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { boolSetting("compress", false, Property.NodeScope, Property.Dynamic); static final Setting CHUNK_SIZE = byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE, Property.NodeScope, Property.Dynamic); - static final Setting APPLICATION_NAME = - new Setting<>("application_name", GoogleCloudStoragePlugin.NAME, Function.identity(), Property.NodeScope, Property.Dynamic); static final Setting CLIENT_NAME = new Setting<>("client", "default", Function.identity()); + + @Deprecated + static final Setting APPLICATION_NAME = + new Setting<>("application_name", "", Function.identity(), Property.NodeScope, Property.Dynamic); + + @Deprecated static final Setting HTTP_READ_TIMEOUT = timeSetting("http.read_timeout", NO_TIMEOUT, Property.NodeScope, Property.Dynamic); + + @Deprecated static final Setting HTTP_CONNECT_TIMEOUT = timeSetting("http.connect_timeout", NO_TIMEOUT, Property.NodeScope, Property.Dynamic); @@ -79,9 +91,7 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { super(metadata, environment.settings(), namedXContentRegistry); String bucket = getSetting(BUCKET, metadata); - String application = getSetting(APPLICATION_NAME, metadata); String clientName = CLIENT_NAME.get(metadata.settings()); - String basePath = BASE_PATH.get(metadata.settings()); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); @@ -93,33 +103,40 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { this.basePath = BlobPath.cleanPath(); } + this.compress = getSetting(COMPRESS, metadata); + this.chunkSize = getSetting(CHUNK_SIZE, metadata); + + logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath, chunkSize, compress); + + String application = 
APPLICATION_NAME.get(metadata.settings()); + if (Strings.hasText(application)) { + deprecationLogger.deprecated("Setting [application_name] in repository settings is deprecated, " + + "it must be specified in the client settings instead"); + } TimeValue connectTimeout = null; TimeValue readTimeout = null; TimeValue timeout = HTTP_CONNECT_TIMEOUT.get(metadata.settings()); if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) { + deprecationLogger.deprecated("Setting [http.connect_timeout] in repository settings is deprecated, " + + "it must be specified in the client settings instead"); connectTimeout = timeout; } - timeout = HTTP_READ_TIMEOUT.get(metadata.settings()); if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) { + deprecationLogger.deprecated("Setting [http.read_timeout] in repository settings is deprecated, " + + "it must be specified in the client settings instead"); readTimeout = timeout; } - this.compress = getSetting(COMPRESS, metadata); - this.chunkSize = getSetting(CHUNK_SIZE, metadata); - - logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}], application [{}]", - bucket, basePath, chunkSize, compress, application); - TimeValue finalConnectTimeout = connectTimeout; TimeValue finalReadTimeout = readTimeout; + Storage client = SocketAccess.doPrivilegedIOException(() -> storageService.createClient(clientName, application, finalConnectTimeout, finalReadTimeout)); this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, client); } - @Override protected BlobStore blobStore() { return blobStore; diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 7625ae7845060..f4d80c9e90453 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ 
b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -25,156 +25,137 @@ import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler; import com.google.api.client.http.HttpRequest; import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpTransport; import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.http.javanet.NetHttpTransport; import com.google.api.client.json.jackson2.JacksonFactory; import com.google.api.client.util.ExponentialBackOff; import com.google.api.services.storage.Storage; -import com.google.api.services.storage.StorageScopes; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.SecureSetting; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.env.Environment; import java.io.IOException; -import java.io.InputStream; -import java.io.UncheckedIOException; -import java.util.Collections; -import java.util.HashMap; import java.util.Map; -import java.util.Set; -interface GoogleCloudStorageService { +public class GoogleCloudStorageService extends AbstractComponent { - /** A json credentials file loaded from secure settings. */ - Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting("gcs.client.", "credentials_file", - key -> SecureSetting.secureFile(key, null)); + /** Clients settings identified by client name. 
*/ + private final Map clientsSettings; + + public GoogleCloudStorageService(final Environment environment, final Map clientsSettings) { + super(environment.settings()); + this.clientsSettings = clientsSettings; + } /** * Creates a client that can be used to manage Google Cloud Storage objects. * - * @param clientName name of client settings to use from secure settings - * @param application name of the application - * @param connectTimeout connection timeout for HTTP requests - * @param readTimeout read timeout for HTTP requests - * @return a Client instance that can be used to manage objects - */ - Storage createClient(String clientName, String application, - TimeValue connectTimeout, TimeValue readTimeout) throws Exception; - - /** - * Default implementation + * @param clientName name of client settings to use from secure settings + * @return a Client instance that can be used to manage Storage objects */ - class InternalGoogleCloudStorageService extends AbstractComponent implements GoogleCloudStorageService { + public Storage createClient(final String clientName, + final String application, + final TimeValue connectTimeout, + final TimeValue readTimeout) throws Exception { + + final GoogleCloudStorageClientSettings clientSettings = clientsSettings.get(clientName); + if (clientSettings == null) { + throw new IllegalArgumentException("Unknown client name [" + clientName + "]. Existing client configs: " + + Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + } - /** Credentials identified by client name. 
*/ - private final Map credentials; + HttpTransport transport = GoogleNetHttpTransport.newTrustedTransport(); + HttpRequestInitializer requestInitializer = createRequestInitializer(clientSettings, connectTimeout, readTimeout); - InternalGoogleCloudStorageService(Environment environment, Map credentials) { - super(environment.settings()); - this.credentials = credentials; + Storage.Builder storage = new Storage.Builder(transport, JacksonFactory.getDefaultInstance(), requestInitializer); + if (Strings.hasText(application)) { + storage.setApplicationName(application); + } else if (Strings.hasLength(clientSettings.getApplicationName())) { + storage.setApplicationName(clientSettings.getApplicationName()); } - - @Override - public Storage createClient(String clientName, String application, - TimeValue connectTimeout, TimeValue readTimeout) throws Exception { - try { - GoogleCredential credential = getCredential(clientName); - NetHttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport(); - - Storage.Builder storage = new Storage.Builder(httpTransport, JacksonFactory.getDefaultInstance(), - new DefaultHttpRequestInitializer(credential, connectTimeout, readTimeout)); - storage.setApplicationName(application); - - logger.debug("initializing client with service account [{}/{}]", - credential.getServiceAccountId(), credential.getServiceAccountUser()); - return storage.build(); - } catch (IOException e) { - throw new ElasticsearchException("Error when loading Google Cloud Storage credentials file", e); - } + if (Strings.hasLength(clientSettings.getEndpoint())) { + storage.setRootUrl(clientSettings.getEndpoint()); } + return storage.build(); + } - // pkg private for tests - GoogleCredential getCredential(String clientName) throws IOException { - GoogleCredential cred = credentials.get(clientName); - if (cred != null) { - return cred; - } - return getDefaultCredential(); + static HttpRequestInitializer createRequestInitializer(final 
GoogleCloudStorageClientSettings settings, + final TimeValue deprecatedConnectTimeout, + final TimeValue deprecatedReadTimeout) throws IOException { + GoogleCredential credential = settings.getCredential(); + if (credential == null) { + credential = GoogleCredential.getApplicationDefault(); } - // pkg private for tests - GoogleCredential getDefaultCredential() throws IOException { - return GoogleCredential.getApplicationDefault(); - } + final Integer connectTimeout = (deprecatedConnectTimeout != null) ? + toTimeout(deprecatedConnectTimeout) : toTimeout(settings.getConnectTimeout()); - /** - * HTTP request initializer that set timeouts and backoff handler while deferring authentication to GoogleCredential. - * See https://cloud.google.com/storage/transfer/create-client#retry - */ - class DefaultHttpRequestInitializer implements HttpRequestInitializer { + final Integer readTimeout = (deprecatedReadTimeout != null) ? + toTimeout(deprecatedReadTimeout) : toTimeout(settings.getReadTimeout()); - private final TimeValue connectTimeout; - private final TimeValue readTimeout; - private final GoogleCredential credential; + return new DefaultHttpRequestInitializer(credential, connectTimeout, readTimeout); + } - DefaultHttpRequestInitializer(GoogleCredential credential, TimeValue connectTimeout, TimeValue readTimeout) { - this.credential = credential; - this.connectTimeout = connectTimeout; - this.readTimeout = readTimeout; - } + /** Converts timeout values from the settings to a timeout value for the Google Cloud SDK **/ + static Integer toTimeout(final TimeValue timeout) { + // Null or zero in settings means the default timeout + if (timeout == null || TimeValue.ZERO.equals(timeout)) { + return null; + } + // -1 means infinite timeout + if (TimeValue.MINUS_ONE.equals(timeout)) { + // 0 is the infinite timeout expected by Google Cloud SDK + return 0; + } + return Math.toIntExact(timeout.getMillis()); + } - @Override - public void initialize(HttpRequest request) throws 
IOException { - if (connectTimeout != null) { - request.setConnectTimeout((int) connectTimeout.millis()); - } - if (readTimeout != null) { - request.setReadTimeout((int) readTimeout.millis()); - } + /** + * HTTP request initializer that set timeouts and backoff handler while deferring authentication to GoogleCredential. + * See https://cloud.google.com/storage/transfer/create-client#retry + */ + static class DefaultHttpRequestInitializer implements HttpRequestInitializer { - request.setIOExceptionHandler(new HttpBackOffIOExceptionHandler(newBackOff())); - request.setInterceptor(credential); + private final Integer connectTimeout; + private final Integer readTimeout; + private final GoogleCredential credential; - final HttpUnsuccessfulResponseHandler handler = new HttpBackOffUnsuccessfulResponseHandler(newBackOff()); - request.setUnsuccessfulResponseHandler((req, resp, supportsRetry) -> { - // Let the credential handle the response. If it failed, we rely on our backoff handler - return credential.handleResponse(req, resp, supportsRetry) || handler.handleResponse(req, resp, supportsRetry); - } - ); - } + DefaultHttpRequestInitializer(GoogleCredential credential, Integer connectTimeoutMillis, Integer readTimeoutMillis) { + this.credential = credential; + this.connectTimeout = connectTimeoutMillis; + this.readTimeout = readTimeoutMillis; + } - private ExponentialBackOff newBackOff() { - return new ExponentialBackOff.Builder() - .setInitialIntervalMillis(100) - .setMaxIntervalMillis(6000) - .setMaxElapsedTimeMillis(900000) - .setMultiplier(1.5) - .setRandomizationFactor(0.5) - .build(); + @Override + public void initialize(HttpRequest request) { + if (connectTimeout != null) { + request.setConnectTimeout(connectTimeout); } - } - } + if (readTimeout != null) { + request.setReadTimeout(readTimeout); + } + + request.setIOExceptionHandler(new HttpBackOffIOExceptionHandler(newBackOff())); + request.setInterceptor(credential); - /** Load all secure credentials from the 
settings. */ - static Map loadClientCredentials(Settings settings) { - Map credentials = new HashMap<>(); - Iterable> iterable = CREDENTIALS_FILE_SETTING.getAllConcreteSettings(settings)::iterator; - for (Setting concreteSetting : iterable) { - try (InputStream credStream = concreteSetting.get(settings)) { - GoogleCredential credential = GoogleCredential.fromStream(credStream); - if (credential.createScopedRequired()) { - credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); + final HttpUnsuccessfulResponseHandler handler = new HttpBackOffUnsuccessfulResponseHandler(newBackOff()); + request.setUnsuccessfulResponseHandler((req, resp, supportsRetry) -> { + // Let the credential handle the response. If it failed, we rely on our backoff handler + return credential.handleResponse(req, resp, supportsRetry) || handler.handleResponse(req, resp, supportsRetry); } - credentials.put(CREDENTIALS_FILE_SETTING.getNamespace(concreteSetting), credential); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + ); + } + + private ExponentialBackOff newBackOff() { + return new ExponentialBackOff.Builder() + .setInitialIntervalMillis(100) + .setMaxIntervalMillis(6000) + .setMaxElapsedTimeMillis(900000) + .setMultiplier(1.5) + .setRandomizationFactor(0.5) + .build(); } - return credentials; } + } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index dbad40ec08393..9829b9db77226 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -32,8 +32,9 @@ import java.net.SocketPermission; import java.security.AccessController; 
-import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -48,7 +49,7 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos @Override protected Collection> nodePlugins() { - return Arrays.asList(MockGoogleCloudStoragePlugin.class); + return Collections.singletonList(MockGoogleCloudStoragePlugin.class); } @Override @@ -58,7 +59,6 @@ protected void createTestRepository(String name) { .setSettings(Settings.builder() .put("bucket", BUCKET) .put("base_path", GoogleCloudStorageBlobStoreRepositoryTests.class.getSimpleName()) - .put("service_account", "_default_") .put("compress", randomBoolean()) .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); } @@ -69,19 +69,26 @@ public static void setUpStorage() { } public static class MockGoogleCloudStoragePlugin extends GoogleCloudStoragePlugin { - public MockGoogleCloudStoragePlugin() { - super(Settings.EMPTY); + public MockGoogleCloudStoragePlugin(final Settings settings) { + super(settings); } @Override protected GoogleCloudStorageService createStorageService(Environment environment) { - return new MockGoogleCloudStorageService(); + return new MockGoogleCloudStorageService(environment, getClientsSettings()); } } - public static class MockGoogleCloudStorageService implements GoogleCloudStorageService { + public static class MockGoogleCloudStorageService extends GoogleCloudStorageService { + + MockGoogleCloudStorageService(Environment environment, Map clientsSettings) { + super(environment, clientsSettings); + } + @Override - public Storage createClient(String accountName, String application, - TimeValue connectTimeout, TimeValue readTimeout) throws Exception { + public Storage createClient(final String clientName, + final String application, + final TimeValue connectTimeout, + final TimeValue 
readTimeout) { // The actual impl might open a connection. So check we have permission when this call is made. AccessController.checkPermission(new SocketPermission("*", "connect")); return storage.get(); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java new file mode 100644 index 0000000000000..badd86cd8a2b3 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -0,0 +1,197 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.repositories.gcs; + +import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; +import com.google.api.services.storage.StorageScopes; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; + +import java.nio.charset.StandardCharsets; +import java.security.KeyPair; +import java.security.KeyPairGenerator; +import java.util.Base64; +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.getClientSettings; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.loadCredential; + +public class GoogleCloudStorageClientSettingsTests extends ESTestCase { + + public void testLoadWithEmptySettings() { + Map clientsSettings = GoogleCloudStorageClientSettings.load(Settings.EMPTY); + assertEquals(1, clientsSettings.size()); + assertNotNull(clientsSettings.get("default")); + } + + public void testLoad() throws Exception { + final int nbClients = randomIntBetween(1, 5); + final Tuple, Settings> randomClients = randomClients(nbClients); + final Map expectedClientsSettings = randomClients.v1(); + + Map actualClientsSettings = 
GoogleCloudStorageClientSettings.load(randomClients.v2()); + assertEquals(expectedClientsSettings.size(), actualClientsSettings.size()); + + for (String clientName : expectedClientsSettings.keySet()) { + GoogleCloudStorageClientSettings actualClientSettings = actualClientsSettings.get(clientName); + assertNotNull(actualClientSettings); + GoogleCloudStorageClientSettings expectedClientSettings = expectedClientsSettings.get(clientName); + assertNotNull(expectedClientSettings); + + assertGoogleCredential(expectedClientSettings.getCredential(), actualClientSettings.getCredential()); + assertEquals(expectedClientSettings.getEndpoint(), actualClientSettings.getEndpoint()); + assertEquals(expectedClientSettings.getConnectTimeout(), actualClientSettings.getConnectTimeout()); + assertEquals(expectedClientSettings.getReadTimeout(), actualClientSettings.getReadTimeout()); + assertEquals(expectedClientSettings.getApplicationName(), actualClientSettings.getApplicationName()); + } + } + + public void testLoadCredential() throws Exception { + Tuple, Settings> randomClient = randomClients(1); + GoogleCloudStorageClientSettings expectedClientSettings = randomClient.v1().values().iterator().next(); + String clientName = randomClient.v1().keySet().iterator().next(); + + assertGoogleCredential(expectedClientSettings.getCredential(), loadCredential(randomClient.v2(), clientName)); + } + + /** Generates a given number of GoogleCloudStorageClientSettings along with the Settings to build them from **/ + private Tuple, Settings> randomClients(final int nbClients) throws Exception { + final Map expectedClients = new HashMap<>(); + expectedClients.put("default", getClientSettings(Settings.EMPTY, "default")); + + final Settings.Builder settings = Settings.builder(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + + for (int i = 0; i < nbClients; i++) { + String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + + GoogleCloudStorageClientSettings 
clientSettings = randomClient(clientName, settings, secureSettings); + expectedClients.put(clientName, clientSettings); + } + + if (randomBoolean()) { + GoogleCloudStorageClientSettings clientSettings = randomClient("default", settings, secureSettings); + expectedClients.put("default", clientSettings); + } + + return Tuple.tuple(expectedClients, settings.setSecureSettings(secureSettings).build()); + } + + /** Generates a random GoogleCloudStorageClientSettings along with the Settings to build it **/ + private static GoogleCloudStorageClientSettings randomClient(final String clientName, + final Settings.Builder settings, + final MockSecureSettings secureSettings) throws Exception { + + Tuple credentials = randomCredential(clientName); + GoogleCredential credential = credentials.v1(); + secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).getKey(), credentials.v2()); + + String endpoint; + if (randomBoolean()) { + endpoint = randomAlphaOfLength(5); + settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); + } else { + endpoint = ENDPOINT_SETTING.getDefault(Settings.EMPTY); + } + + TimeValue connectTimeout; + if (randomBoolean()) { + connectTimeout = randomTimeout(); + settings.put(CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), connectTimeout.getStringRep()); + } else { + connectTimeout = CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY); + } + + TimeValue readTimeout; + if (randomBoolean()) { + readTimeout = randomTimeout(); + settings.put(READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), readTimeout.getStringRep()); + } else { + readTimeout = READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY); + } + + String applicationName; + if (randomBoolean()) { + applicationName = randomAlphaOfLength(5); + settings.put(APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName); + } else { + applicationName = 
APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY); + } + + return new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); + } + + /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ + private static Tuple randomCredential(final String clientName) throws Exception { + KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); + + GoogleCredential.Builder credentialBuilder = new GoogleCredential.Builder(); + credentialBuilder.setServiceAccountId(clientName); + credentialBuilder.setServiceAccountProjectId("project_id_" + clientName); + credentialBuilder.setServiceAccountScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); + credentialBuilder.setServiceAccountPrivateKey(keyPair.getPrivate()); + credentialBuilder.setServiceAccountPrivateKeyId("private_key_id_" + clientName); + + String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); + String serviceAccount = "{\"type\":\"service_account\"," + + "\"project_id\":\"project_id_" + clientName + "\"," + + "\"private_key_id\":\"private_key_id_" + clientName + "\"," + + "\"private_key\":\"-----BEGIN PRIVATE KEY-----\\n" + + encodedPrivateKey + + "\\n-----END PRIVATE KEY-----\\n\"," + + "\"client_email\":\"" + clientName + "\"," + + "\"client_id\":\"id_" + clientName + "\"," + + "\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\"," + + "\"token_uri\":\"https://accounts.google.com/o/oauth2/token\"," + + "\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"," + + "\"client_x509_cert_url\":\"https://www.googleapis.com/robot/v1/metadata/x509/" + + clientName + + "%40appspot.gserviceaccount.com\"}"; + + return Tuple.tuple(credentialBuilder.build(), serviceAccount.getBytes(StandardCharsets.UTF_8)); + } + + private static TimeValue randomTimeout() { + return randomFrom(TimeValue.MINUS_ONE, TimeValue.ZERO, 
TimeValue.parseTimeValue(randomPositiveTimeValue(), "test")); + } + + private static void assertGoogleCredential(final GoogleCredential expected, final GoogleCredential actual) { + if (expected != null) { + assertEquals(expected.getServiceAccountUser(), actual.getServiceAccountUser()); + assertEquals(expected.getServiceAccountId(), actual.getServiceAccountId()); + assertEquals(expected.getServiceAccountProjectId(), actual.getServiceAccountProjectId()); + assertEquals(expected.getServiceAccountScopesAsString(), actual.getServiceAccountScopesAsString()); + assertEquals(expected.getServiceAccountPrivateKey(), actual.getServiceAccountPrivateKey()); + assertEquals(expected.getServiceAccountPrivateKeyId(), actual.getServiceAccountPrivateKeyId()); + } else { + assertNull(actual); + } + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryDeprecationTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryDeprecationTests.java new file mode 100644 index 0000000000000..473424986a0e5 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryDeprecationTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.gcs; + +import com.google.api.services.storage.Storage; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.test.ESTestCase; + +public class GoogleCloudStorageRepositoryDeprecationTests extends ESTestCase { + + public void testDeprecatedSettings() throws Exception { + final Settings repositorySettings = Settings.builder() + .put("bucket", "test") + .put("application_name", "deprecated") + .put("http.read_timeout", "10s") + .put("http.connect_timeout", "20s") + .build(); + + final RepositoryMetaData repositoryMetaData = new RepositoryMetaData("test", "gcs", repositorySettings); + final Environment environment = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); + + new GoogleCloudStorageRepository(repositoryMetaData, environment, NamedXContentRegistry.EMPTY, + new GoogleCloudStorageService(environment, GoogleCloudStorageClientSettings.load(Settings.EMPTY)) { + @Override + public Storage createClient(String clientName, String application, TimeValue connect, TimeValue read) throws Exception { + return MockStorage.newStorageClient("test", "deprecated"); + } + }); + + assertWarnings( + "Setting [application_name] in repository settings is deprecated, it must be specified in the client settings instead", + "Setting [http.read_timeout] in repository settings is deprecated, it must be specified in the client settings instead", + "Setting [http.connect_timeout] in repository settings is deprecated, it must be specified in the client settings instead"); + } +} diff --git 
a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 07bd6974c6513..59931fe623ee4 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -31,17 +31,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.repositories.gcs.GoogleCloudStorageService.InternalGoogleCloudStorageService; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.io.InputStream; -import java.util.Collections; -import java.util.Map; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Mockito.mock; @@ -51,34 +44,8 @@ public class GoogleCloudStorageServiceTests extends ESTestCase { - private InputStream getDummyCredentialStream() throws IOException { - return GoogleCloudStorageServiceTests.class.getResourceAsStream("/dummy-account.json"); - } - - public void testDefaultCredential() throws Exception { - Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - GoogleCredential cred = GoogleCredential.fromStream(getDummyCredentialStream()); - InternalGoogleCloudStorageService service = new InternalGoogleCloudStorageService(env, Collections.emptyMap()) { - @Override - GoogleCredential getDefaultCredential() throws IOException { - return cred; - } - }; - assertSame(cred, service.getCredential("default")); - - service.new 
DefaultHttpRequestInitializer(cred, null, null); - } - - public void testClientCredential() throws Exception { - GoogleCredential cred = GoogleCredential.fromStream(getDummyCredentialStream()); - Map<String, GoogleCredential> credentials = singletonMap("clientname", cred); - Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - InternalGoogleCloudStorageService service = new InternalGoogleCloudStorageService(env, credentials); - assertSame(cred, service.getCredential("clientname")); - } - - /** - * Test that the {@link InternalGoogleCloudStorageService.DefaultHttpRequestInitializer} attaches new instances + * Test that the {@link GoogleCloudStorageService.DefaultHttpRequestInitializer} attaches new instances * of {@link HttpIOExceptionHandler} and {@link HttpUnsuccessfulResponseHandler} for every HTTP requests. */ public void testDefaultHttpRequestInitializer() throws IOException { @@ -88,23 +55,44 @@ public void testDefaultHttpRequestInitializer() throws IOException { final GoogleCredential credential = mock(GoogleCredential.class); when(credential.handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean())).thenReturn(false); + final String endpoint = randomBoolean() ? randomAlphaOfLength(10) : null; + final TimeValue readTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); final TimeValue connectTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); + final String applicationName = randomBoolean() ? randomAlphaOfLength(10) : null; + + final boolean useDeprecatedSettings = true; + + final TimeValue deprecatedReadTimeout = useDeprecatedSettings ? TimeValue.timeValueSeconds(randomIntBetween(1, 120)) : null; + final TimeValue deprecatedConnectTimeout = useDeprecatedSettings ? 
TimeValue.timeValueSeconds(randomIntBetween(1, 120)) : null; - final InternalGoogleCloudStorageService service = new InternalGoogleCloudStorageService(environment, emptyMap()); - final HttpRequestInitializer initializer = service.new DefaultHttpRequestInitializer(credential, connectTimeout, readTimeout); + final GoogleCloudStorageClientSettings clientSettings = + new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); + + final HttpRequestInitializer initializer = + GoogleCloudStorageService.createRequestInitializer(clientSettings, deprecatedConnectTimeout, deprecatedReadTimeout); final HttpRequestFactory requestFactory = new MockHttpTransport().createRequestFactory(initializer); final HttpRequest request1 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request1.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request1.getReadTimeout()); + if (useDeprecatedSettings) { + assertEquals((int) deprecatedConnectTimeout.millis(), request1.getConnectTimeout()); + assertEquals((int) deprecatedReadTimeout.millis(), request1.getReadTimeout()); + } else { + assertEquals((int) connectTimeout.millis(), request1.getConnectTimeout()); + assertEquals((int) readTimeout.millis(), request1.getReadTimeout()); + } assertSame(credential, request1.getInterceptor()); assertNotNull(request1.getIOExceptionHandler()); assertNotNull(request1.getUnsuccessfulResponseHandler()); final HttpRequest request2 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request2.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request2.getReadTimeout()); + if (useDeprecatedSettings) { + assertEquals((int) deprecatedConnectTimeout.millis(), request2.getConnectTimeout()); + assertEquals((int) deprecatedReadTimeout.millis(), request2.getReadTimeout()); + } else { + assertEquals((int) connectTimeout.millis(), request2.getConnectTimeout()); 
+ assertEquals((int) readTimeout.millis(), request2.getReadTimeout()); + } assertSame(request1.getInterceptor(), request2.getInterceptor()); assertNotNull(request2.getIOExceptionHandler()); assertNotSame(request1.getIOExceptionHandler(), request2.getIOExceptionHandler()); @@ -117,4 +105,10 @@ public void testDefaultHttpRequestInitializer() throws IOException { request2.getUnsuccessfulResponseHandler().handleResponse(null, null, false); verify(credential, times(2)).handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean()); } + + public void testToTimeout() { + assertNull(GoogleCloudStorageService.toTimeout(null)); + assertNull(GoogleCloudStorageService.toTimeout(TimeValue.ZERO)); + assertEquals(0, GoogleCloudStorageService.toTimeout(TimeValue.MINUS_ONE).intValue()); + } } diff --git a/plugins/repository-gcs/src/test/resources/dummy-account.json b/plugins/repository-gcs/src/test/resources/dummy-account.json deleted file mode 100644 index e282b6db0e3b0..0000000000000 --- a/plugins/repository-gcs/src/test/resources/dummy-account.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "service_account", - "project_id": "some-project-name", - "private_key_id": "c7cefcb7c72a2880ecce49cb9d1095de5a61aff0", - "private_key": "-----BEGIN PRIVATE 
KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDa+7r0RE1YykXC\n+d+DXlN3Dg3aL1YOfYuhy5PF/Vi0FFQHyXuPtAvkVHZD2NxMDZq2DxTu3AVLh1UE\nt2hMrWjDQDuArPl8FezpyYQwde04Qlx1YpQ1xUjTaFWd0hrOZEfsxY00h3ilxR3G\nJsofR3PZBKYI11VGruNemCgjiJg5hcoJDxLXUgcfpKJaiPeHutczCeZ1RANQwQF1\n/dPXhqbtWiaS/iu5so64P54TsrVX5DcXmbGr6hQAReIcI6cjA8QhSu6QBtdvEPhv\n27uTuSu4XRtTh3djVGFzFV9pamGZeGELkTiHVSDI8IkQ32s8yuP5Zys/4bFJk7nn\niqJpe0/DAgMBAAECggEAEexQnPWKLx4/H3o8JRBvXGs2DwmYzY7RAukaqzXVMMgJ\nKKoBBv4Biyquk1cIkOD8LLKHUBWKCWiGOOCaFMyMqo5zUFDYCqPwxCHOQ/ki9VvZ\nHXJ4Fv6Su1rqxwQPVZ03ldWFfSspYMgFa9Z47J54iOasgES/og1mZrOldWMUsoBu\nCKf0fH+vIsxWPwmRtyxKCMwqenqdc22nGGLhmpm8tuw1eQp6XtTXagqkPtAVMMga\nmgC0EGqhZA/IklGW1JuGWELjXVMgS/tLIPq+hYsmY14y6Ie032YoSMWkz6Z5p7i0\n/JwCzVZNO1mD0MwVj7nDmokXOpoyM7Qcbx8r1E4Q4QKBgQDxqAZ6D+A671mCNU0J\n6Qzc3cOZq7MBj4y7M/2qPXHC7i/DdbmnM7PPPriaBBch2nX7jZRlRmVDBsmrC4OG\n3m5+HAx7YPVbefwe6h5ki5O9wg1pLcgYY9uvgLSlD85lVZKAzO7QK2W5zfM19kPD\nSckIa+U7DKFbwKhtCsxcP6ARJwKBgQDn+zAPHepGP2Zf78pOLlVXcQqVA1uOu+bW\nrG4G+lPrytB0C4QdwWdBV3Lcqmnw/ae5PkQBs0dCbtWG8+MT8gA6k5kleflaZrAY\ngdUJIUP6J7ocWYxVTfqGFyFF1n5VT8/jbVucaT7izBZfZvlGyf7Vz7ewQzgWQWlK\nCQ0qstV2BQKBgQCajAQAYlDcQCC1dlMbqHDye91RVQ65S84MF1b+Xid4LA5d6dde\nyGERhKJY1Y7ZtrZHt6cVEe1G7XtiKY3nXi+59URCT6L66svEFaR0VxOYgxdCkeXr\nO0nPNvfQrIgqJIz6VJXSij6XktAdTa7OoUyxVxeWKSC05kSQ4BwMTyCWdwKBgQCW\noqlmZ4qE6w5TJaY8diG8kg7JDFEbsjAHHhikN1DfP+d0MzYrDDc8WsifOZlpf4y1\n4RTP9dZD8Sx+YUgG35H+d3FuwHGGnj+i6kunjg5SFhHn7s4NZoFTKRnV+541T4oy\nqARg4IaRRu0QLhGYQfpUZHlm339AFGGGTbJbE51A8QKBgQDTEN5O+3bRG3Fa1J6z\nU9PMrjjs6l8xhXFso10YEYG5KRnfhzCFujyWNiLE6WrlUL8invVBaCxsZr51GDgA\nhyEEdm4kXCRrv4JyhOvIuGxNcAIiQK/e91UQEM6u1t6hUI1rE7ZOyJQzBxj9hFlV\n7OvhBlHXQUtAOdq0XLHr9GzdSA==\n-----END PRIVATE KEY-----\n", - "client_email": "some-project-name@appspot.gserviceaccount.com", - "client_id": "123456789101112130594", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://accounts.google.com/o/oauth2/token", - "auth_provider_x509_cert_url": 
"https://www.googleapis.com/oauth2/v1/certs", - "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/some-project-name%40appspot.gserviceaccount.com" -} From be8ae87a50890eacc7fd9311b528ac728f710907 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 9 Mar 2018 13:57:27 +0100 Subject: [PATCH 16/89] Use fixture to test the repository-gcs plugin (#28788) This commit adds a GoogleCloudStorageFixture that uses the logic of a GoogleCloudStorageTestServer (added in #28576) to emulate a remote Google Cloud Storage service. By adding this fixture and a more complete integration test, we should be able to catch more bugs when upgrading the client library. The fixture is started by the googleCloudStorageFixture task and a custom Service Account file is created and added to the Elasticsearch keystore for each test. --- plugins/repository-gcs/build.gradle | 50 ++++ .../gcs/GoogleCloudStorageFixture.java | 135 +++++++++++ .../gcs/GoogleCloudStorageTestServer.java | 214 +++++++++++++----- .../repositories/gcs/MockStorage.java | 3 +- .../test/repository_gcs/10_basic.yml | 177 ++++++++++++++- .../test/repository_gcs/10_basic.yml~ | 18 ++ 6 files changed, 536 insertions(+), 61 deletions(-) create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java create mode 100644 plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml~ diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 7d3246376a8e5..450af05b75ac1 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -1,3 +1,8 @@ +import org.elasticsearch.gradle.test.AntFixture + +import java.security.KeyPair +import java.security.KeyPairGenerator + /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -52,3 +57,48 @@ thirdPartyAudit.excludes = [ 'org.apache.log.Hierarchy', 'org.apache.log.Logger', ] + +/** A task to start the GoogleCloudStorageFixture which emulates a Google Cloud Storage service **/ +task googleCloudStorageFixture(type: AntFixture) { + dependsOn compileTestJava + executable = new File(project.runtimeJavaHome, 'bin/java') + args '-cp', "${ -> project.sourceSets.test.runtimeClasspath.asPath }", + 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', + baseDir, 'bucket_test' +} + +/** A service account file that points to the Google Cloud Storage service emulated by the fixture **/ +File serviceAccountFile = new File(project.buildDir, "generated-resources/service_account_test.json") +task createServiceAccountFile() { + dependsOn googleCloudStorageFixture + doLast { + KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA") + keyPairGenerator.initialize(1024) + KeyPair keyPair = keyPairGenerator.generateKeyPair() + String encodedKey = Base64.getEncoder().encodeToString(keyPair.private.getEncoded()) + + serviceAccountFile.parentFile.mkdirs() + serviceAccountFile.setText("{\n" + + ' "type": "service_account",\n' + + ' "project_id": "integration_test",\n' + + ' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' + + ' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' + + ' "client_email": "integration_test@appspot.gserviceaccount.com",\n' + + ' "client_id": "123456789101112130594",\n' + + " \"auth_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/auth\",\n" + + " \"token_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token\",\n" + + ' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' + + ' "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/integration_test%40appspot.gserviceaccount.com"\n' + + '}', 'UTF-8') + } +} + +integTestCluster { 
+ dependsOn createServiceAccountFile, googleCloudStorageFixture + setupCommand 'create-elasticsearch-keystore', 'bin/elasticsearch-keystore', 'create' + setupCommand 'add-credentials-to-elasticsearch-keystore', + 'bin/elasticsearch-keystore', 'add-file', 'gcs.client.integration_test.credentials_file', "${serviceAccountFile.absolutePath}" + + /* Use a closure on the string to delay evaluation until tests are executed */ + setting 'gcs.client.integration_test.endpoint', "http://${ -> googleCloudStorageFixture.addressAndPort }" +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java new file mode 100644 index 0000000000000..cddcab870de34 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java @@ -0,0 +1,135 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.repositories.gcs; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.mocksocket.MockHttpServer; +import org.elasticsearch.repositories.gcs.GoogleCloudStorageTestServer.Response; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.net.Inet6Address; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.List; +import java.util.Map; + +import static java.util.Collections.singleton; +import static java.util.Collections.singletonList; + +/** + * {@link GoogleCloudStorageFixture} is a fixture that emulates a Google Cloud Storage service. + *

+ * It starts an asynchronous socket server that binds to a random local port. The server parses + * HTTP requests and uses a {@link GoogleCloudStorageTestServer} to handle them before returning + * them to the client as HTTP responses. + */ +public class GoogleCloudStorageFixture { + + @SuppressForbidden(reason = "PathUtils#get is fine - we don't have environment here") + public static void main(String[] args) throws Exception { + if (args == null || args.length != 2) { + throw new IllegalArgumentException("GoogleCloudStorageFixture <working directory> <bucket>"); + } + + final InetSocketAddress socketAddress = new InetSocketAddress(InetAddress.getLoopbackAddress(), 0); + final HttpServer httpServer = MockHttpServer.createHttp(socketAddress, 0); + + try { + final Path workingDirectory = Paths.get(args[0]); + // Writes the PID of the current Java process in a `pid` file located in the working directory + writeFile(workingDirectory, "pid", ManagementFactory.getRuntimeMXBean().getName().split("@")[0]); + + final String addressAndPort = addressToString(httpServer.getAddress()); + // Writes the address and port of the http server in a `ports` file located in the working directory + writeFile(workingDirectory, "ports", addressAndPort); + + // Emulates a Google Cloud Storage server + final String storageUrl = "http://" + addressAndPort; + final GoogleCloudStorageTestServer storageTestServer = new GoogleCloudStorageTestServer(storageUrl); + storageTestServer.createBucket(args[1]); + + httpServer.createContext("/", new ResponseHandler(storageTestServer)); + httpServer.start(); + + // Wait to be killed + Thread.sleep(Long.MAX_VALUE); + + } finally { + httpServer.stop(0); + } + } + + private static void writeFile(final Path dir, final String fileName, final String content) throws IOException { + final Path tempPidFile = Files.createTempFile(dir, null, null); + Files.write(tempPidFile, singleton(content)); + Files.move(tempPidFile, dir.resolve(fileName), StandardCopyOption.ATOMIC_MOVE); + } + + 
private static String addressToString(final SocketAddress address) { + final InetSocketAddress inetSocketAddress = (InetSocketAddress) address; + if (inetSocketAddress.getAddress() instanceof Inet6Address) { + return "[" + inetSocketAddress.getHostString() + "]:" + inetSocketAddress.getPort(); + } else { + return inetSocketAddress.getHostString() + ":" + inetSocketAddress.getPort(); + } + } + + @SuppressForbidden(reason = "Use a http server") + static class ResponseHandler implements HttpHandler { + + private final GoogleCloudStorageTestServer storageServer; + + private ResponseHandler(final GoogleCloudStorageTestServer storageServer) { + this.storageServer = storageServer; + } + + @Override + public void handle(HttpExchange exchange) throws IOException { + String method = exchange.getRequestMethod(); + String path = storageServer.getEndpoint() + exchange.getRequestURI().getRawPath(); + String query = exchange.getRequestURI().getRawQuery(); + Map<String, List<String>> headers = exchange.getRequestHeaders(); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + Streams.copy(exchange.getRequestBody(), out); + + final Response storageResponse = storageServer.handle(method, path, query, headers, out.toByteArray()); + + Map<String, List<String>> responseHeaders = exchange.getResponseHeaders(); + responseHeaders.put("Content-Type", singletonList(storageResponse.contentType)); + storageResponse.headers.forEach((k, v) -> responseHeaders.put(k, singletonList(v))); + exchange.sendResponseHeaders(storageResponse.status.getStatus(), storageResponse.body.length); + if (storageResponse.body.length > 0) { + exchange.getResponseBody().write(storageResponse.body); + } + exchange.close(); + } + } +} diff --git 
a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.gcs; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -27,10 +26,11 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; -import java.io.BufferedInputStream; +import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; @@ -39,13 +39,15 @@ import java.util.Objects; import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; /** - * {@link GoogleCloudStorageTestServer} emulates a Google Cloud Storage service through a {@link #handle(String, String, byte[])} method - * that provides appropriate responses for specific requests like the real Google Cloud platform would do. It is largely based on official - * documentation available at https://cloud.google.com/storage/docs/json_api/v1/. + * {@link GoogleCloudStorageTestServer} emulates a Google Cloud Storage service through + * a {@link #handle(String, String, String, Map, byte[])} method that provides appropriate + * responses for specific requests like the real Google Cloud platform would do. + * It is largely based on official documentation available at https://cloud.google.com/storage/docs/json_api/v1/. 
*/ public class GoogleCloudStorageTestServer { @@ -57,19 +59,22 @@ public class GoogleCloudStorageTestServer { /** Request handlers for the requests made by the Google Cloud Storage client **/ private final PathTrie handlers; + /** Server endpoint **/ + private final String endpoint; + /** * Creates a {@link GoogleCloudStorageTestServer} with the default endpoint */ GoogleCloudStorageTestServer() { - this("https://www.googleapis.com", true); + this("https://www.googleapis.com"); } /** - * Creates a {@link GoogleCloudStorageTestServer} with a custom endpoint, - * potentially prefixing the URL patterns to match with the endpoint name. + * Creates a {@link GoogleCloudStorageTestServer} with a custom endpoint */ - GoogleCloudStorageTestServer(final String endpoint, final boolean prefixWithEndpoint) { - this.handlers = defaultHandlers(endpoint, prefixWithEndpoint, buckets); + GoogleCloudStorageTestServer(final String endpoint) { + this.endpoint = Objects.requireNonNull(endpoint, "endpoint must not be null"); + this.handlers = defaultHandlers(endpoint, buckets); } /** Creates a bucket in the test server **/ @@ -77,24 +82,61 @@ void createBucket(final String bucketName) { buckets.put(bucketName, new Bucket(bucketName)); } - public Response handle(final String method, final String url, byte[] content) throws IOException { - final Map params = new HashMap<>(); + public String getEndpoint() { + return endpoint; + } - // Splits the URL to extract query string parameters - final String rawPath; - int questionMark = url.indexOf('?'); - if (questionMark != -1) { - rawPath = url.substring(0, questionMark); - RestUtils.decodeQueryString(url, questionMark + 1, params); - } else { - rawPath = url; + /** + * Returns a Google Cloud Storage response for the given request + * + * @param method the HTTP method of the request + * @param url the HTTP URL of the request + * @param headers the HTTP headers of the request + * @param body the HTTP request body + * @return a {@link Response} + 
* + * @throws IOException if something goes wrong + */ + public Response handle(final String method, + final String url, + final Map> headers, + byte[] body) throws IOException { + + final int questionMark = url.indexOf('?'); + if (questionMark == -1) { + return handle(method, url, null, headers, body); } + return handle(method, url.substring(0, questionMark), url.substring(questionMark + 1), headers, body); + } + + /** + * Returns a Google Cloud Storage response for the given request + * + * @param method the HTTP method of the request + * @param path the path of the URL of the request + * @param query the queryString of the URL of request + * @param headers the HTTP headers of the request + * @param body the HTTP request body + * @return a {@link Response} + * @throws IOException if something goes wrong + */ + public Response handle(final String method, + final String path, + final String query, + final Map> headers, + byte[] body) throws IOException { - final RequestHandler handler = handlers.retrieve(method + " " + rawPath, params); + final Map params = new HashMap<>(); + if (query != null) { + RestUtils.decodeQueryString(query, 0, params); + } + + final RequestHandler handler = handlers.retrieve(method + " " + path, params); if (handler != null) { - return handler.execute(url, params, content); + return handler.execute(params, headers, body); } else { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "No handler defined for request [method: " + method + ", url: " + url + "]"); + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "No handler defined for request [method: " + method + ", path: " + path + "]"); } } @@ -104,28 +146,24 @@ interface RequestHandler { /** * Simulates the execution of a Storage request and returns a corresponding response. 
* - * @param url the request URL - * @param params the request URL parameters + * @param params the request's query string parameters + * @param headers the request's headers * @param body the request body provided as a byte array * @return the corresponding response * * @throws IOException if something goes wrong */ - Response execute(String url, Map params, byte[] body) throws IOException; + Response execute(Map params, Map> headers, byte[] body) throws IOException; } /** Builds the default request handlers **/ - private static PathTrie defaultHandlers(final String endpoint, - final boolean prefixWithEndpoint, - final Map buckets) { - + private static PathTrie defaultHandlers(final String endpoint, final Map buckets) { final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER); - final String prefix = prefixWithEndpoint ? endpoint : ""; // GET Bucket // // https://cloud.google.com/storage/docs/json_api/v1/buckets/get - handlers.insert("GET " + prefix + "/storage/v1/b/{bucket}", (url, params, body) -> { + handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}", (params, headers, body) -> { String name = params.get("bucket"); if (Strings.hasText(name) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "bucket name is missing"); @@ -141,7 +179,7 @@ private static PathTrie defaultHandlers(final String endpoint, // GET Object // // https://cloud.google.com/storage/docs/json_api/v1/objects/get - handlers.insert("GET " + prefix + "/storage/v1/b/{bucket}/o/{object}", (url, params, body) -> { + handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); @@ -163,7 +201,7 @@ private static PathTrie defaultHandlers(final String endpoint, // Delete Object // // https://cloud.google.com/storage/docs/json_api/v1/objects/delete - handlers.insert("DELETE " 
+ prefix + "/storage/v1/b/{bucket}/o/{object}", (url, params, body) -> { + handlers.insert("DELETE " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); @@ -184,7 +222,7 @@ private static PathTrie defaultHandlers(final String endpoint, // Insert Object (initialization) // // https://cloud.google.com/storage/docs/json_api/v1/objects/insert - handlers.insert("POST " + prefix + "/upload/storage/v1/b/{bucket}/o", (url, params, body) -> { + handlers.insert("POST " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { if ("resumable".equals(params.get("uploadType")) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable"); } @@ -210,7 +248,7 @@ private static PathTrie defaultHandlers(final String endpoint, // Insert Object (upload) // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload - handlers.insert("PUT " + prefix + "/upload/storage/v1/b/{bucket}/o", (url, params, body) -> { + handlers.insert("PUT " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { String objectId = params.get("upload_id"); if (Strings.hasText(objectId) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload id is missing"); @@ -232,7 +270,7 @@ private static PathTrie defaultHandlers(final String endpoint, // Copy Object // // https://cloud.google.com/storage/docs/json_api/v1/objects/copy - handlers.insert("POST " + prefix + "/storage/v1/b/{srcBucket}/o/{src}/copyTo/b/{destBucket}/o/{dest}", (url, params, body) -> { + handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/copyTo/b/{destBucket}/o/{dest}", (params, headers, body)-> { String source = params.get("src"); if (Strings.hasText(source) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object 
name is missing"); @@ -265,7 +303,7 @@ private static PathTrie defaultHandlers(final String endpoint, // List Objects // // https://cloud.google.com/storage/docs/json_api/v1/objects/list - handlers.insert("GET " + prefix + "/storage/v1/b/{bucket}/o", (url, params, body) -> { + handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}/o", (params, headers, body) -> { final Bucket bucket = buckets.get(params.get("bucket")); if (bucket == null) { return newError(RestStatus.NOT_FOUND, "bucket not found"); @@ -293,7 +331,7 @@ private static PathTrie defaultHandlers(final String endpoint, // Download Object // // https://cloud.google.com/storage/docs/request-body - handlers.insert("GET " + prefix + "/download/storage/v1/b/{bucket}/o/{object}", (url, params, body) -> { + handlers.insert("GET " + endpoint + "/download/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { String object = params.get("object"); if (Strings.hasText(object) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object id is missing"); @@ -314,7 +352,7 @@ private static PathTrie defaultHandlers(final String endpoint, // Batch // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch - handlers.insert("POST " + prefix + "/batch", (url, params, req) -> { + handlers.insert("POST " + endpoint + "/batch", (params, headers, body) -> { final List batchedResponses = new ArrayList<>(); // A batch request body looks like this: @@ -339,37 +377,88 @@ private static PathTrie defaultHandlers(final String endpoint, // // --__END_OF_PART__-- - // Here we simply process the request body line by line and delegate to other handlers - // if possible. 
- Streams.readAllLines(new BufferedInputStream(new ByteArrayInputStream(req)), line -> { - final int indexOfHttp = line.indexOf(" HTTP/1.1"); - if (indexOfHttp > 0) { - line = line.substring(0, indexOfHttp); + // Default multipart boundary + String boundary = "__END_OF_PART__"; + + // Determine the multipart boundary + final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); + if (contentTypes != null) { + final String contentType = contentTypes.get(0); + if (contentType != null && contentType.contains("multipart/mixed; boundary=")) { + boundary = contentType.replace("multipart/mixed; boundary=", ""); } + } - RequestHandler handler = handlers.retrieve(line, params); - if (handler != null) { - try { - batchedResponses.add(handler.execute(line, params, req)); - } catch (IOException e) { - batchedResponses.add(newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + // Read line by line the batched requests + try (BufferedReader reader = new BufferedReader( + new InputStreamReader( + new ByteArrayInputStream(body), StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { + // Start of a batched request + if (line.equals("--" + boundary)) { + Map> batchedHeaders = new HashMap<>(); + + // Reads the headers, if any + while ((line = reader.readLine()) != null) { + if (line.equals("\r\n") || line.length() == 0) { + // end of headers + break; + } else { + String[] header = line.split(":", 2); + batchedHeaders.put(header[0], singletonList(header[1])); + } + } + + // Reads the method and URL + line = reader.readLine(); + String batchedUrl = line.substring(0, line.lastIndexOf(' ')); + + final Map batchedParams = new HashMap<>(); + int questionMark = batchedUrl.indexOf('?'); + if (questionMark != -1) { + RestUtils.decodeQueryString(batchedUrl.substring(questionMark + 1), 0, batchedParams); + } + + // Reads the body + line = reader.readLine(); + byte[] batchedBody = new byte[0]; + if (line != null || 
line.startsWith("--" + boundary) == false) { + batchedBody = line.getBytes(StandardCharsets.UTF_8); + } + + // Executes the batched request + RequestHandler handler = handlers.retrieve(batchedUrl, batchedParams); + if (handler != null) { + try { + batchedResponses.add(handler.execute(batchedParams, batchedHeaders, batchedBody)); + } catch (IOException e) { + batchedResponses.add(newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } + } } } - }); + } // Now we can build the response - String boundary = "__END_OF_PART__"; String sep = "--"; String line = "\r\n"; StringBuilder builder = new StringBuilder(); for (Response response : batchedResponses) { builder.append(sep).append(boundary).append(line); + builder.append("Content-Type: application/http").append(line); builder.append(line); - builder.append("HTTP/1.1 ").append(response.status.getStatus()); - builder.append(' ').append(response.status.toString()); - builder.append(line); + builder.append("HTTP/1.1 ") + .append(response.status.getStatus()) + .append(' ') + .append(response.status.toString()) + .append(line); builder.append("Content-Length: ").append(response.body.length).append(line); + builder.append("Content-Type: ").append(response.contentType).append(line); + response.headers.forEach((k, v) -> builder.append(k).append(": ").append(v).append(line)); + builder.append(line); + builder.append(new String(response.body, StandardCharsets.UTF_8)).append(line); builder.append(line); } builder.append(line); @@ -379,6 +468,17 @@ private static PathTrie defaultHandlers(final String endpoint, return new Response(RestStatus.OK, emptyMap(), "multipart/mixed; boundary=" + boundary, content); }); + // Fake refresh of an OAuth2 token + // + handlers.insert("POST " + endpoint + "/o/oauth2/token", (url, params, req) -> + newResponse(RestStatus.OK, emptyMap(), jsonBuilder() + .startObject() + .field("access_token", "unknown") + .field("token_type", "Bearer") + .field("expires_in", 3600) + .endObject()) + ); + 
return handlers; } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 8be7511ab58c6..a04dae294975a 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -49,8 +49,7 @@ public LowLevelHttpRequest buildRequest(String method, String url) throws IOExce return new MockLowLevelHttpRequest() { @Override public LowLevelHttpResponse execute() throws IOException { - final GoogleCloudStorageTestServer.Response response = server.handle(method, url, getContentAsBytes()); - return convert(response); + return convert(server.handle(method, url, getHeaders(), getContentAsBytes())); } /** Returns the LowLevelHttpRequest body as an array of bytes **/ diff --git a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml index a37fb77954971..62387227cbc9d 100644 --- a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml +++ b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml @@ -1,6 +1,6 @@ -# Integration tests for Repository GCS component +# Integration tests for repository-gcs # -"Repository GCS loaded": +"Plugin repository-gcs is loaded": - do: cluster.state: {} @@ -11,3 +11,176 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-gcs } +--- +"Snapshot/Restore with repository-gcs": + - skip: + version: " - 6.3.0" + reason: repository-gcs was not testable through YAML tests until 6.3.0 + + # Register repository + - do: + snapshot.create_repository: + repository: repository + body: + type: gcs + settings: + bucket: "bucket_test" + client: "integration_test" + + - match: { acknowledged: 
true } + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository + snapshot: snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index + - do: + indices.delete: + index: docs + + # Restore the second snapshot + - do: + snapshot.restore: + repository: repository + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + 
indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: repository + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository + snapshot: snapshot-one + + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository + + + + diff --git a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml~ b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml~ new file mode 100644 index 0000000000000..61cc8191c3251 --- /dev/null +++ b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml~ @@ -0,0 +1,18 @@ +# Integration tests for repository-gcs +# +"Plugin repository-gcs is loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: repository-gcs } +-- +"Snapshot/Restore with repository-gcs": + - do: + create.index: + From 01f6788e0e54fafffc599bfc05d77cc23fd9675e Mon Sep 17 00:00:00 2001 From: Paul Sanwald Date: Fri, 9 Mar 2018 11:54:59 -0800 Subject: [PATCH 17/89] Update Gradle Testing Docs (#28970) Attempting to run the REST tests, I noticed the testing instructions in the `TESTING.asciidoc` were outdated, so I fixed the paths. Steps I took to test: * Ran `./gradlew :distribution:packages:rpm:assemble` and make sure RPM is created in `./distribution/packages/rpm/build/distributions/` * Ran testing commands and verified the REST tests ran. 
--- TESTING.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 784e6fc2dcfd8..e828a9b0a5c6c 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -270,14 +270,14 @@ The REST tests are run automatically when executing the "./gradlew check" comman REST tests use the following command: --------------------------------------------------------------------------- -./gradlew :distribution:integ-test-zip:integTest \ +./gradlew :distribution:archives:integ-test-zip:integTest \ -Dtests.class="org.elasticsearch.test.rest.*Yaml*IT" --------------------------------------------------------------------------- A specific test case can be run with --------------------------------------------------------------------------- -./gradlew :distribution:integ-test-zip:integTest \ +./gradlew :distribution:archives:integ-test-zip:integTest \ -Dtests.class="org.elasticsearch.test.rest.*Yaml*IT" \ -Dtests.method="test {p0=cat.shards/10_basic/Help}" --------------------------------------------------------------------------- @@ -407,7 +407,7 @@ destructive. When working with a single package it's generally faster to run its tests in a tighter loop than Gradle provides. In one window: -------------------------------- -./gradlew :distribution:rpm:assemble +./gradlew :distribution:packages:rpm:assemble -------------------------------- and in another window: From b0819673e1c6dc2e276e8187e8ff38de7b5b0445 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Fri, 9 Mar 2018 14:58:54 -0700 Subject: [PATCH 18/89] Remove Booleans use from XContent and ToXContent (#28768) * Remove Booleans use from XContent and ToXContent This removes the use of the `common.Boolean` class from two of the XContent classes, so they can be decoupled from the ES code as much as possible. 
Related to #28754, #28504 --- .../common/xcontent/Booleans.java | 46 +++++++++++++++++++ .../common/xcontent/ToXContent.java | 3 +- .../common/xcontent/XContent.java | 4 +- 3 files changed, 48 insertions(+), 5 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/xcontent/Booleans.java diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/Booleans.java b/server/src/main/java/org/elasticsearch/common/xcontent/Booleans.java new file mode 100644 index 0000000000000..21c0ea5fdd08b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/xcontent/Booleans.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent; + +/** + * Helpers for dealing with boolean values. Package-visible only so that only XContent classes use them. + */ +final class Booleans { + /** + * Parse {@code value} with values "true", "false", or null, returning the + * default value if null or the empty string is used. Any other input + * results in an {@link IllegalArgumentException} being thrown. 
+ */ + static boolean parseBoolean(String value, Boolean defaultValue) { + if (value != null && value.length() > 0) { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new IllegalArgumentException("Failed to parse param [" + value + "] as only [true] or [false] are allowed."); + } + } else { + return defaultValue; + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java index 3006363a4ddd4..f74bdec17a9f6 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ToXContent.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.xcontent; -import org.elasticsearch.common.Booleans; - import java.io.IOException; import java.util.Map; @@ -132,4 +130,5 @@ public Boolean paramAsBoolean(String key, Boolean defaultValue) { default boolean isFragment() { return true; } + } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java index 11d019c3291d1..6f6ee4ffdda54 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContent.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.xcontent; -import org.elasticsearch.common.Booleans; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -51,7 +49,7 @@ public interface XContent { */ static boolean isStrictDuplicateDetectionEnabled() { // Don't allow duplicate keys in JSON content by default but let the user opt out - return Booleans.parseBoolean(System.getProperty("es.xcontent.strict_duplicate_detection", "true")); + return Booleans.parseBoolean(System.getProperty("es.xcontent.strict_duplicate_detection", "true"), true); } /** From 
358d3f059f234ace9eeee58cfad99255580b68c2 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 9 Mar 2018 18:23:16 -0500 Subject: [PATCH 19/89] Remove special handling for _all in nodes info Today when requesting _all we return all nodes regardless of what other node qualifiers are in the request. This is contrary to how the remainder of the API behaves which acts as additive and subtractive based on the qualifiers and their ordering. It is also contrary to how the wildcard * behaves. This commit removes the special handling for _all so that it behaves identical to the wildcard *. Relates #28971 --- .../cluster/node/DiscoveryNodes.java | 27 ++++---------- .../cluster/node/DiscoveryNodesTests.java | 37 +++++++++++++++++++ 2 files changed, 44 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 4373069a5f77c..7ac7af2ecd2e7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -39,6 +39,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.stream.StreamSupport; /** * This class holds all {@link DiscoveryNode} in the cluster and provides convenience methods to @@ -229,10 +230,6 @@ public DiscoveryNode findByAddress(TransportAddress address) { return null; } - public boolean isAllNodes(String... nodesIds) { - return nodesIds == null || nodesIds.length == 0 || (nodesIds.length == 1 && nodesIds[0].equals("_all")); - } - /** * Returns the version of the node with the oldest version in the cluster that is not a client node * @@ -301,13 +298,8 @@ public DiscoveryNode resolveNode(String node) { * or a generic node attribute name in which case value will be treated as a wildcard and matched against the node attribute values. */ public String[] resolveNodes(String... 
nodes) { - if (isAllNodes(nodes)) { - int index = 0; - nodes = new String[this.nodes.size()]; - for (DiscoveryNode node : this) { - nodes[index++] = node.getId(); - } - return nodes; + if (nodes == null || nodes.length == 0) { + return StreamSupport.stream(this.spliterator(), false).map(DiscoveryNode::getId).toArray(String[]::new); } else { ObjectHashSet resolvedNodesIds = new ObjectHashSet<>(nodes.length); for (String nodeId : nodes) { @@ -324,16 +316,11 @@ public String[] resolveNodes(String... nodes) { } else if (nodeExists(nodeId)) { resolvedNodesIds.add(nodeId); } else { - // not a node id, try and search by name - for (DiscoveryNode node : this) { - if (Regex.simpleMatch(nodeId, node.getName())) { - resolvedNodesIds.add(node.getId()); - } - } for (DiscoveryNode node : this) { - if (Regex.simpleMatch(nodeId, node.getHostAddress())) { - resolvedNodesIds.add(node.getId()); - } else if (Regex.simpleMatch(nodeId, node.getHostName())) { + if ("_all".equals(nodeId) + || Regex.simpleMatch(nodeId, node.getName()) + || Regex.simpleMatch(nodeId, node.getHostAddress()) + || Regex.simpleMatch(nodeId, node.getHostName())) { resolvedNodesIds.add(node.getId()); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java index 9200e04c7127a..0401f51a58797 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -34,9 +34,11 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.nullValue; @@ -69,6 
+71,41 @@ public void testResolveNodeByAttribute() { } } + public void testAll() { + final DiscoveryNodes discoveryNodes = buildDiscoveryNodes(); + + final String[] allNodes = + StreamSupport.stream(discoveryNodes.spliterator(), false).map(DiscoveryNode::getId).toArray(String[]::new); + assertThat(discoveryNodes.resolveNodes(), arrayContainingInAnyOrder(allNodes)); + assertThat(discoveryNodes.resolveNodes(new String[0]), arrayContainingInAnyOrder(allNodes)); + assertThat(discoveryNodes.resolveNodes("_all"), arrayContainingInAnyOrder(allNodes)); + + final String[] nonMasterNodes = + StreamSupport.stream(discoveryNodes.getNodes().values().spliterator(), false) + .map(n -> n.value) + .filter(n -> n.isMasterNode() == false) + .map(DiscoveryNode::getId) + .toArray(String[]::new); + assertThat(discoveryNodes.resolveNodes("_all", "master:false"), arrayContainingInAnyOrder(nonMasterNodes)); + + assertThat(discoveryNodes.resolveNodes("master:false", "_all"), arrayContainingInAnyOrder(allNodes)); + } + + public void testCoordinatorOnlyNodes() { + final DiscoveryNodes discoveryNodes = buildDiscoveryNodes(); + + final String[] coordinatorOnlyNodes = + StreamSupport.stream(discoveryNodes.getNodes().values().spliterator(), false) + .map(n -> n.value) + .filter(n -> n.isDataNode() == false && n.isIngestNode() == false && n.isMasterNode() == false) + .map(DiscoveryNode::getId) + .toArray(String[]::new); + + assertThat( + discoveryNodes.resolveNodes("_all", "data:false", "ingest:false", "master:false"), + arrayContainingInAnyOrder(coordinatorOnlyNodes)); + } + public void testResolveNodesIds() { DiscoveryNodes discoveryNodes = buildDiscoveryNodes(); From 0ca6b7e222686328adbd0a919ee78c3382c6b8f9 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 9 Mar 2018 20:51:37 -0500 Subject: [PATCH 20/89] DOC: add migration help for rollover alias #28110 --- docs/reference/migration/migrate_6_2.asciidoc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git 
a/docs/reference/migration/migrate_6_2.asciidoc b/docs/reference/migration/migrate_6_2.asciidoc index 213f0d556521c..81dac028ce300 100644 --- a/docs/reference/migration/migrate_6_2.asciidoc +++ b/docs/reference/migration/migrate_6_2.asciidoc @@ -51,3 +51,9 @@ distributions and use the default logging location use `/var/log/elasticsearch` otherwise if you have installed from the `tar.gz` or `zip` archive distributions and use the default logging location use `logs/gc.log`; otherwise, set the location to the absolute path to your log files. + +=== Rollover alias +A rollover request in Elasticsearch 6.2.0 or later will be aborted if the same +alias is found in the matching index templates. This change is to prevent the alias +from pointing to multiple indices during rollover. If your index templates already +contain the same alias, you should remove it by updating the index templates. \ No newline at end of file From a28987ad9118ecc53578bbe3d40460243135c498 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 10 Mar 2018 07:41:51 -0500 Subject: [PATCH 21/89] Maybe die before failing engine (#28973) Today we check for a few cases where we should maybe die before failing the engine (e.g., when a merge fails). However, there are still other cases where a fatal error can be hidden from us (for example, a failed index writer commit). This commit modifies the mechanism for failing the engine to always check for a fatal error before failing the engine. 
--- .../elasticsearch/index/engine/Engine.java | 24 +++++++++++++++++++ .../index/engine/InternalEngine.java | 23 ------------------ 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index fb937ed4e9302..1452c5de49278 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -847,11 +847,35 @@ public void forceMerge(boolean flush) throws IOException { */ public abstract IndexCommitRef acquireSafeIndexCommit() throws EngineException; + /** + * If the specified throwable contains a fatal error in the throwable graph, such a fatal error will be thrown. Callers should ensure + * that there are no catch statements that would catch an error in the stack as the fatal error here should go uncaught and be handled + * by the uncaught exception handler that we install during bootstrap. If the specified throwable does indeed contain a fatal error, the + * specified message will attempt to be logged before throwing the fatal error. If the specified throwable does not contain a fatal + * error, this method is a no-op. + * + * @param maybeMessage the message to maybe log + * @param maybeFatal the throwable that maybe contains a fatal error + */ + @SuppressWarnings("finally") + private void maybeDie(final String maybeMessage, final Throwable maybeFatal) { + ExceptionsHelper.maybeError(maybeFatal, logger).ifPresent(error -> { + try { + logger.error(maybeMessage, error); + } finally { + throw error; + } + }); + } + /** * fail engine due to some error. the engine will also be closed. 
* The underlying store is marked corrupted iff failure is caused by index corruption */ public void failEngine(String reason, @Nullable Exception failure) { + if (failure != null) { + maybeDie(reason, failure); + } if (failEngineLock.tryLock()) { store.incRef(); try { diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index ffb325eb8fa38..dffd45b62badd 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -1788,7 +1788,6 @@ private boolean failOnTragicEvent(AlreadyClosedException ex) { // we need to fail the engine. it might have already been failed before // but we are double-checking it's failed and closed if (indexWriter.isOpen() == false && indexWriter.getTragicException() != null) { - maybeDie("tragic event in index writer", indexWriter.getTragicException()); failEngine("already closed by tragic event on the index writer", (Exception) indexWriter.getTragicException()); engineFailed = true; } else if (translog.isOpen() == false && translog.getTragicException() != null) { @@ -2138,34 +2137,12 @@ protected void doRun() throws Exception { * confidence that the call stack does not contain catch statements that would cause the error that might be thrown * here from being caught and never reaching the uncaught exception handler. */ - maybeDie("fatal error while merging", exc); - logger.error("failed to merge", exc); failEngine("merge failed", new MergePolicy.MergeException(exc, dir)); } }); } } - /** - * If the specified throwable is a fatal error, this throwable will be thrown. Callers should ensure that there are no catch statements - * that would catch an error in the stack as the fatal error here should go uncaught and be handled by the uncaught exception handler - * that we install during bootstrap. 
If the specified throwable is indeed a fatal error, the specified message will attempt to be logged - * before throwing the fatal error. If the specified throwable is not a fatal error, this method is a no-op. - * - * @param maybeMessage the message to maybe log - * @param maybeFatal the throwable that is maybe fatal - */ - @SuppressWarnings("finally") - private void maybeDie(final String maybeMessage, final Throwable maybeFatal) { - if (maybeFatal instanceof Error) { - try { - logger.error(maybeMessage, maybeFatal); - } finally { - throw (Error) maybeFatal; - } - } - } - /** * Commits the specified index writer. * From bd636e371197f5cc8c510450bb6beb32d91629db Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 9 Mar 2018 09:55:45 -0800 Subject: [PATCH 22/89] Make primary-replica resync failures less lenient (#28534) Today, failures from the primary-replica resync are ignored as the best effort to not mark shards as stale during the cluster restart. However this can be problematic if replicas failed to execute resync operations but just fine in the subsequent write operations. When this happens, replica will miss some operations from the new primary. There are some implications if the local checkpoint on replica can't advance because of the missing operations. 1. The global checkpoint won't advance - this causes both primary and replicas keep many index commits 2. Engine on replica won't flush periodically because uncommitted stats is calculated based on the local checkpoint 3. Replica can use a large number of bitsets to keep track operations seqno However we can prevent this issue but still reserve the best-effort by failing replicas which fail to execute resync operations but not mark them as stale. We have prepared to the required infrastructure in #28049 and #28054 for this change. 
Relates #24841 --- .../TransportResyncReplicationAction.java | 23 +++++- .../TransportReplicationAction.java | 24 ++++++ .../replication/TransportWriteAction.java | 31 +------ .../cluster/routing/PrimaryAllocationIT.java | 80 +++++++++++++++++++ 4 files changed, 128 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java index 32614c636f128..4e7c66afdcaf0 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -45,6 +46,7 @@ import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; +import java.util.function.Consumer; import java.util.function.Supplier; public class TransportResyncReplicationAction extends TransportWriteAction onPrimaryDemoted, Consumer onIgnoredFailure) { + shardStateAction.remoteShardFailed(replica.shardId(), replica.allocationId().getId(), primaryTerm, false, message, exception, + createShardActionListener(onSuccess, onPrimaryDemoted, onIgnoredFailure)); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 4398c56f26c77..2cd5f7a5f13ac 100644 --- 
a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -1172,6 +1172,30 @@ public void markShardCopyAsStaleIfNeeded(ShardId shardId, String allocationId, R // "alive" if it were to be marked as stale. onSuccess.run(); } + + protected final ShardStateAction.Listener createShardActionListener(final Runnable onSuccess, + final Consumer onPrimaryDemoted, + final Consumer onIgnoredFailure) { + return new ShardStateAction.Listener() { + @Override + public void onSuccess() { + onSuccess.run(); + } + + @Override + public void onFailure(Exception shardFailedError) { + if (shardFailedError instanceof ShardStateAction.NoLongerPrimaryShardException) { + onPrimaryDemoted.accept(shardFailedError); + } else { + // these can occur if the node is shutting down and are okay + // any other exception here is not expected and merits investigation + assert shardFailedError instanceof TransportException || + shardFailedError instanceof NodeClosedException : shardFailedError; + onIgnoredFailure.accept(shardFailedError); + } + } + }; + } } /** diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java index 28b8f0826cd91..2a3e8be7aa8bb 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java @@ -384,41 +384,16 @@ class WriteActionReplicasProxy extends ReplicasProxy { @Override public void failShardIfNeeded(ShardRouting replica, String message, Exception exception, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { - - logger.warn((org.apache.logging.log4j.util.Supplier) - () -> new ParameterizedMessage("[{}] {}", replica.shardId(), 
message), exception); + logger.warn(new ParameterizedMessage("[{}] {}", replica.shardId(), message), exception); shardStateAction.remoteShardFailed(replica.shardId(), replica.allocationId().getId(), primaryTerm, true, message, exception, - createListener(onSuccess, onPrimaryDemoted, onIgnoredFailure)); + createShardActionListener(onSuccess, onPrimaryDemoted, onIgnoredFailure)); } @Override public void markShardCopyAsStaleIfNeeded(ShardId shardId, String allocationId, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { shardStateAction.remoteShardFailed(shardId, allocationId, primaryTerm, true, "mark copy as stale", null, - createListener(onSuccess, onPrimaryDemoted, onIgnoredFailure)); - } - - private ShardStateAction.Listener createListener(final Runnable onSuccess, final Consumer onPrimaryDemoted, - final Consumer onIgnoredFailure) { - return new ShardStateAction.Listener() { - @Override - public void onSuccess() { - onSuccess.run(); - } - - @Override - public void onFailure(Exception shardFailedError) { - if (shardFailedError instanceof ShardStateAction.NoLongerPrimaryShardException) { - onPrimaryDemoted.accept(shardFailedError); - } else { - // these can occur if the node is shutting down and are okay - // any other exception here is not expected and merits investigation - assert shardFailedError instanceof TransportException || - shardFailedError instanceof NodeClosedException : shardFailedError; - onIgnoredFailure.accept(shardFailedError); - } - } - }; + createShardActionListener(onSuccess, onPrimaryDemoted, onIgnoredFailure)); } } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 86dd2dfe18904..d7a91c988e9da 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -22,6 +22,7 @@ 
import com.carrotsearch.hppc.cursors.IntObjectCursor; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -31,6 +32,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardTestCase; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; @@ -43,15 +48,23 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.isIn; +import static org.hamcrest.Matchers.not; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, 
numDataNodes = 0) public class PrimaryAllocationIT extends ESIntegTestCase { @@ -309,4 +322,71 @@ public void testForceAllocatePrimaryOnNoDecision() throws Exception { assertEquals(1, client().admin().cluster().prepareState().get().getState() .routingTable().index(indexName).shardsWithState(ShardRoutingState.STARTED).size()); } + + /** + * This test asserts that replicas failed to execute resync operations will be failed but not marked as stale. + */ + public void testPrimaryReplicaResyncFailed() throws Exception { + String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); + final int numberOfReplicas = between(2, 3); + final String oldPrimary = internalCluster().startDataOnlyNode(); + assertAcked( + prepareCreate("test", Settings.builder().put(indexSettings()) + .put(SETTING_NUMBER_OF_SHARDS, 1) + .put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas))); + final ShardId shardId = new ShardId(clusterService().state().metaData().index("test").getIndex(), 0); + final Set replicaNodes = new HashSet<>(internalCluster().startDataOnlyNodes(numberOfReplicas)); + ensureGreen(); + assertAcked( + client(master).admin().cluster().prepareUpdateSettings() + .setTransientSettings(Settings.builder().put("cluster.routing.allocation.enable", "none")).get()); + logger.info("--> Indexing with gap in seqno to ensure that some operations will be replayed in resync"); + long numDocs = scaledRandomIntBetween(5, 50); + for (int i = 0; i < numDocs; i++) { + IndexResponse indexResult = index("test", "doc", Long.toString(i)); + assertThat(indexResult.getShardInfo().getSuccessful(), equalTo(numberOfReplicas + 1)); + } + final IndexShard oldPrimaryShard = internalCluster().getInstance(IndicesService.class, oldPrimary).getShardOrNull(shardId); + IndexShardTestCase.getEngine(oldPrimaryShard).getLocalCheckpointTracker().generateSeqNo(); // Make gap in seqno. 
+ long moreDocs = scaledRandomIntBetween(1, 10); + for (int i = 0; i < moreDocs; i++) { + IndexResponse indexResult = index("test", "doc", Long.toString(numDocs + i)); + assertThat(indexResult.getShardInfo().getSuccessful(), equalTo(numberOfReplicas + 1)); + } + final Set replicasSide1 = Sets.newHashSet(randomSubsetOf(between(1, numberOfReplicas - 1), replicaNodes)); + final Set replicasSide2 = Sets.difference(replicaNodes, replicasSide1); + NetworkDisruption partition = new NetworkDisruption(new TwoPartitions(replicasSide1, replicasSide2), new NetworkDisconnect()); + internalCluster().setDisruptionScheme(partition); + logger.info("--> isolating some replicas during primary-replica resync"); + partition.startDisrupting(); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(oldPrimary)); + // Checks that we fails replicas in one side but not mark them as stale. + assertBusy(() -> { + ClusterState state = client(master).admin().cluster().prepareState().get().getState(); + final IndexShardRoutingTable shardRoutingTable = state.routingTable().shardRoutingTable(shardId); + final String newPrimaryNode = state.getRoutingNodes().node(shardRoutingTable.primary.currentNodeId()).node().getName(); + assertThat(newPrimaryNode, not(equalTo(oldPrimary))); + Set selectedPartition = replicasSide1.contains(newPrimaryNode) ? 
replicasSide1 : replicasSide2; + assertThat(shardRoutingTable.activeShards(), hasSize(selectedPartition.size())); + for (ShardRouting activeShard : shardRoutingTable.activeShards()) { + assertThat(state.getRoutingNodes().node(activeShard.currentNodeId()).node().getName(), isIn(selectedPartition)); + } + assertThat(state.metaData().index("test").inSyncAllocationIds(shardId.id()), hasSize(numberOfReplicas + 1)); + }, 1, TimeUnit.MINUTES); + assertAcked( + client(master).admin().cluster().prepareUpdateSettings() + .setTransientSettings(Settings.builder().put("cluster.routing.allocation.enable", "all")).get()); + partition.stopDisrupting(); + logger.info("--> stop disrupting network and re-enable allocation"); + assertBusy(() -> { + ClusterState state = client(master).admin().cluster().prepareState().get().getState(); + assertThat(state.routingTable().shardRoutingTable(shardId).activeShards(), hasSize(numberOfReplicas)); + assertThat(state.metaData().index("test").inSyncAllocationIds(shardId.id()), hasSize(numberOfReplicas + 1)); + for (String node : replicaNodes) { + IndexShard shard = internalCluster().getInstance(IndicesService.class, node).getShardOrNull(shardId); + assertThat(shard.getLocalCheckpoint(), equalTo(numDocs + moreDocs)); + } + }); + } + } From e25ced2418ea9e711209220db4ad5e1e37b4b7de Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sat, 10 Mar 2018 22:56:22 -0500 Subject: [PATCH 23/89] TEST: Use non-zero number for #testCompareUnits In `ByteSizeValueTests#testCompareUnits`, we expect non-zero for the variable `number` however `randomNonNegativeLong` can return zero. 
CI: https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+6.2+oracle-java10-periodic/147/console --- .../java/org/elasticsearch/common/unit/ByteSizeValueTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java index d0b58cafdedfe..52cca6e467338 100644 --- a/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java +++ b/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java @@ -169,7 +169,7 @@ public void testCompareValue() { } public void testCompareUnits() { - long number = randomNonNegativeLong() / ByteSizeUnit.PB.toBytes(1); + long number = randomLongBetween(1, Long.MAX_VALUE/ ByteSizeUnit.PB.toBytes(1)); ByteSizeUnit randomUnit = randomValueOtherThan(ByteSizeUnit.PB, ()->randomFrom(ByteSizeUnit.values())); ByteSizeValue firstByteValue = new ByteSizeValue(number, randomUnit); ByteSizeValue secondByteValue = new ByteSizeValue(number, ByteSizeUnit.PB); From bc73893c859c69ff3dde124895ca75baee2d104a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sun, 11 Mar 2018 09:01:27 -0700 Subject: [PATCH 24/89] Plugins: Allow modules to spawn controllers (#28968) This commit makes the controller spawner also look under modules. It also fixes a bug in module security policy loading where the module is a meta plugin. 
--- .../bootstrap/SpawnerNoBootstrapTests.java | 51 +++++++++++-------- .../org/elasticsearch/bootstrap/Security.java | 10 +--- .../org/elasticsearch/bootstrap/Spawner.java | 16 +++--- 3 files changed, 42 insertions(+), 35 deletions(-) diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index e4e603dff9503..9f895c44977fc 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -42,6 +42,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -77,6 +78,7 @@ public void testNoControllerSpawn() throws IOException, InterruptedException { // This plugin will NOT have a controller daemon Path plugin = environment.pluginsFile().resolve("a_plugin"); + Files.createDirectories(environment.modulesFile()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, @@ -97,7 +99,12 @@ public void testNoControllerSpawn() throws IOException, InterruptedException { /** * Two plugins - one with a controller daemon and one without. */ - public void testControllerSpawn() throws IOException, InterruptedException { + public void testControllerSpawn() throws Exception { + assertControllerSpawns(Environment::pluginsFile); + assertControllerSpawns(Environment::modulesFile); + } + + private void assertControllerSpawns(Function pluginsDirFinder) throws Exception { /* * On Windows you can not directly run a batch file - you have to run cmd.exe with the batch * file as an argument and that's out of the remit of the controller daemon process spawner. 
@@ -112,32 +119,34 @@ public void testControllerSpawn() throws IOException, InterruptedException { Environment environment = TestEnvironment.newEnvironment(settings); // this plugin will have a controller daemon - Path plugin = environment.pluginsFile().resolve("test_plugin"); + Path plugin = pluginsDirFinder.apply(environment).resolve("test_plugin"); + Files.createDirectories(environment.modulesFile()); + Files.createDirectories(environment.pluginsFile()); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( - plugin, - "description", "test_plugin", - "version", Version.CURRENT.toString(), - "elasticsearch.version", Version.CURRENT.toString(), - "name", "test_plugin", - "java.version", "1.8", - "classname", "TestPlugin", - "has.native.controller", "true"); + plugin, + "description", "test_plugin", + "version", Version.CURRENT.toString(), + "elasticsearch.version", Version.CURRENT.toString(), + "name", "test_plugin", + "java.version", "1.8", + "classname", "TestPlugin", + "has.native.controller", "true"); Path controllerProgram = Platforms.nativeControllerPath(plugin); createControllerProgram(controllerProgram); // this plugin will not have a controller daemon - Path otherPlugin = environment.pluginsFile().resolve("other_plugin"); + Path otherPlugin = pluginsDirFinder.apply(environment).resolve("other_plugin"); Files.createDirectories(otherPlugin); PluginTestUtil.writePluginProperties( - otherPlugin, - "description", "other_plugin", - "version", Version.CURRENT.toString(), - "elasticsearch.version", Version.CURRENT.toString(), - "name", "other_plugin", - "java.version", "1.8", - "classname", "OtherPlugin", - "has.native.controller", "false"); + otherPlugin, + "description", "other_plugin", + "version", Version.CURRENT.toString(), + "elasticsearch.version", Version.CURRENT.toString(), + "name", "other_plugin", + "java.version", "1.8", + "classname", "OtherPlugin", + "has.native.controller", "false"); Spawner spawner = new Spawner(); 
spawner.spawnNativePluginControllers(environment); @@ -150,7 +159,7 @@ public void testControllerSpawn() throws IOException, InterruptedException { assertThat(processes, hasSize(1)); Process process = processes.get(0); final InputStreamReader in = - new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8); + new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8); try (BufferedReader stdoutReader = new BufferedReader(in)) { String line = stdoutReader.readLine(); assertEquals("I am alive", line); @@ -181,6 +190,7 @@ public void testControllerSpawnMetaPlugin() throws IOException, InterruptedExcep Environment environment = TestEnvironment.newEnvironment(settings); Path metaPlugin = environment.pluginsFile().resolve("meta_plugin"); + Files.createDirectories(environment.modulesFile()); Files.createDirectories(metaPlugin); PluginTestUtil.writeMetaPluginProperties( metaPlugin, @@ -279,6 +289,7 @@ public void testSpawnerHandlingOfDesktopServicesStoreFiles() throws IOException final Environment environment = TestEnvironment.newEnvironment(settings); + Files.createDirectories(environment.modulesFile()); Files.createDirectories(environment.pluginsFile()); final Path desktopServicesStore = environment.pluginsFile().resolve(".DS_Store"); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Security.java b/server/src/main/java/org/elasticsearch/bootstrap/Security.java index 57b141383073e..9f2790c94cce6 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -163,16 +163,8 @@ static Map getPluginPermissions(Environment environment) throws I Map map = new HashMap<>(); // collect up set of plugins and modules by listing directories. 
Set pluginsAndModules = new LinkedHashSet<>(PluginsService.findPluginDirs(environment.pluginsFile())); + pluginsAndModules.addAll(PluginsService.findPluginDirs(environment.modulesFile())); - if (Files.exists(environment.modulesFile())) { - try (DirectoryStream stream = Files.newDirectoryStream(environment.modulesFile())) { - for (Path module : stream) { - if (pluginsAndModules.add(module) == false) { - throw new IllegalStateException("duplicate module: " + module); - } - } - } - } // now process each one for (Path plugin : pluginsAndModules) { Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java index dcaad3c39dd96..08731522a31a3 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java @@ -63,15 +63,20 @@ void spawnNativePluginControllers(final Environment environment) throws IOExcept if (!spawned.compareAndSet(false, true)) { throw new IllegalStateException("native controllers already spawned"); } - final Path pluginsFile = environment.pluginsFile(); - if (!Files.exists(pluginsFile)) { - throw new IllegalStateException("plugins directory [" + pluginsFile + "] not found"); + spawnControllers(environment.pluginsFile(), "plugins", environment.tmpFile()); + spawnControllers(environment.modulesFile(), "modules", environment.tmpFile()); + } + + /** Spawn controllers in plugins found within the given directory. */ + private void spawnControllers(Path pluginsDir, String type, Path tmpDir) throws IOException { + if (!Files.exists(pluginsDir)) { + throw new IllegalStateException(type + " directory [" + pluginsDir + "] not found"); } /* * For each plugin, attempt to spawn the controller daemon. Silently ignore any plugin that * don't include a controller for the correct platform. 
*/ - List paths = PluginsService.findPluginDirs(pluginsFile); + List paths = PluginsService.findPluginDirs(pluginsDir); for (Path plugin : paths) { final PluginInfo info = PluginInfo.readFromProperties(plugin); final Path spawnPath = Platforms.nativeControllerPath(plugin); @@ -85,8 +90,7 @@ void spawnNativePluginControllers(final Environment environment) throws IOExcept plugin.getFileName()); throw new IllegalArgumentException(message); } - final Process process = - spawnNativePluginController(spawnPath, environment.tmpFile()); + final Process process = spawnNativePluginController(spawnPath, tmpDir); processes.add(process); } } From 083bfa63f141d89b7f1533031a217a69e6e4078e Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Mon, 12 Mar 2018 07:58:07 +0100 Subject: [PATCH 25/89] Honor RUNTIME_JAVA_HOME for benchmarks (#28962) With this commit we configure our microbenchmarks project to use the configured RUNTIME_JAVA_HOME and to fallback on JAVA_HOME so this behavior is consistent with the rest of the Elasticsearch build. Closes #28961 --- benchmarks/build.gradle | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index 929e10f885ac7..1a1145a237a76 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -85,6 +85,10 @@ shadowJar { classifier = 'benchmarks' } +runShadow { + executable = new File(project.runtimeJavaHome, 'bin/java') +} + // alias the shadowJar and runShadow tasks to abstract from the concrete plugin that we are using and provide a more consistent interface task jmhJar( dependsOn: shadowJar, From 941a52503c34fb7ffe7ecc25cf65c06d203d80fe Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 12 Mar 2018 09:58:20 +0100 Subject: [PATCH 26/89] Restore tiebreaker for cross fields query (#28935) This commit restores the handling of tiebreaker for multi_match cross fields query. This functionality was lost during a refactoring of the multi_match query (#25115). 
Fixes #28933 --- .../index/search/MultiMatchQuery.java | 12 ++++---- .../index/search/MultiMatchQueryTests.java | 29 ++++++++++++------- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index c559aa9458b5b..7df1b756579b4 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -81,7 +81,7 @@ public Query parse(MultiMatchQueryBuilder.Type type, Map fieldNam queryBuilder = new QueryBuilder(tieBreaker); break; case CROSS_FIELDS: - queryBuilder = new CrossFieldsQueryBuilder(); + queryBuilder = new CrossFieldsQueryBuilder(tieBreaker); break; default: throw new IllegalStateException("No such type: " + type); @@ -151,8 +151,8 @@ public Query blendPhrase(PhraseQuery query, MappedFieldType type) { final class CrossFieldsQueryBuilder extends QueryBuilder { private FieldAndFieldType[] blendedFields; - CrossFieldsQueryBuilder() { - super(0.0f); + CrossFieldsQueryBuilder(float tiebreaker) { + super(tiebreaker); } @Override @@ -238,7 +238,7 @@ public Query blendPhrase(PhraseQuery query, MappedFieldType type) { /** * We build phrase queries for multi-word synonyms when {@link QueryBuilder#autoGenerateSynonymsPhraseQuery} is true. */ - return MultiMatchQuery.blendPhrase(query, blendedFields); + return MultiMatchQuery.blendPhrase(query, tieBreaker, blendedFields); } } @@ -313,7 +313,7 @@ static Query blendTerms(QueryShardContext context, BytesRef[] values, Float comm * Expand a {@link PhraseQuery} to multiple fields that share the same analyzer. * Returns a {@link DisjunctionMaxQuery} with a disjunction for each expanded field. */ - static Query blendPhrase(PhraseQuery query, FieldAndFieldType... fields) { + static Query blendPhrase(PhraseQuery query, float tiebreaker, FieldAndFieldType... 
fields) { List disjunctions = new ArrayList<>(); for (FieldAndFieldType field : fields) { int[] positions = query.getPositions(); @@ -328,7 +328,7 @@ static Query blendPhrase(PhraseQuery query, FieldAndFieldType... fields) { } disjunctions.add(q); } - return new DisjunctionMaxQuery(disjunctions, 0.0f); + return new DisjunctionMaxQuery(disjunctions, tiebreaker); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 0f6429f7f30b3..64c07267b77ca 100644 --- a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -96,17 +96,24 @@ public void testCrossFieldMultiMatchQuery() throws IOException { QueryShardContext queryShardContext = indexService.newQueryShardContext( randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null); queryShardContext.setAllowUnmappedFields(true); - Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); - try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { - Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); - Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2); - Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3); - Query expected = new DisjunctionMaxQuery( - Arrays.asList( - new MatchNoDocsQuery("unknown field foobar"), - new DisjunctionMaxQuery(Arrays.asList(tq2, tq1), 0f) - ), 0f); - assertEquals(expected, rewrittenQuery); + for (float tieBreaker : new float[] {0.0f, 0.5f}) { + Query parsedQuery = multiMatchQuery("banon") + .field("name.first", 2) + .field("name.last", 3).field("foobar") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + 
.tieBreaker(tieBreaker) + .toQuery(queryShardContext); + try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { + Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); + Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2); + Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3); + Query expected = new DisjunctionMaxQuery( + Arrays.asList( + new MatchNoDocsQuery("unknown field foobar"), + new DisjunctionMaxQuery(Arrays.asList(tq2, tq1), tieBreaker) + ), tieBreaker); + assertEquals(expected, rewrittenQuery); + } } } From d36d44e696323462290e3460537ccae49a1f3708 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Sun, 4 Mar 2018 11:31:13 -0800 Subject: [PATCH 27/89] Try if tombstone is eligable for pruning before locking on it's key (#28767) Pruning tombstones is quite expensive since we have to walk though all deletes in the live version map and acquire a lock on every value even though it's impossible to prune it. This change does a pre-check if a delete is old enough and if not it skips acquireing the lock. --- .../index/engine/LiveVersionMap.java | 40 +++++++++++-------- 1 file changed, 23 insertions(+), 17 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index 9c111ebc645bd..fc62f1fb32e2b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -375,25 +375,31 @@ void removeTombstoneUnderLock(BytesRef uid) { } } + private boolean canRemoveTombstone(long currentTime, long pruneInterval, DeleteVersionValue versionValue) { + // check if the value is old enough to be removed + final boolean isTooOld = currentTime - versionValue.time > pruneInterval; + // version value can't be removed it's + // not yet flushed to lucene ie. 
it's part of this current maps object + final boolean isNotTrackedByCurrentMaps = versionValue.time < maps.getMinDeleteTimestamp(); + return isTooOld && isNotTrackedByCurrentMaps; + } + void pruneTombstones(long currentTime, long pruneInterval) { for (Map.Entry entry : tombstones.entrySet()) { - final BytesRef uid = entry.getKey(); - try (Releasable lock = keyedLock.tryAcquire(uid)) { - // we use tryAcquire here since this is a best effort and we try to be least disruptive - // this method is also called under lock in the engine under certain situations such that this can lead to deadlocks - // if we do use a blocking acquire. see #28714 - if (lock != null) { // did we get the lock? - // can we do it without this lock on each value? maybe batch to a set and get the lock once per set? - // Must re-get it here, vs using entry.getValue(), in case the uid was indexed/deleted since we pulled the iterator: - final DeleteVersionValue versionValue = tombstones.get(uid); - if (versionValue != null) { - // check if the value is old enough to be removed - final boolean isTooOld = currentTime - versionValue.time > pruneInterval; - if (isTooOld) { - // version value can't be removed it's - // not yet flushed to lucene ie. it's part of this current maps object - final boolean isNotTrackedByCurrentMaps = versionValue.time < maps.getMinDeleteTimestamp(); - if (isNotTrackedByCurrentMaps) { + // we do check before we actually lock the key - this way we don't need to acquire the lock for tombstones that are not + // prune-able. If the tombstone changes concurrently we will re-read and step out below since if we can't collect it now w + // we won't collect the tombstone below since it must be newer than this one. 
+ if (canRemoveTombstone(currentTime, pruneInterval, entry.getValue())) { + final BytesRef uid = entry.getKey(); + try (Releasable lock = keyedLock.tryAcquire(uid)) { + // we use tryAcquire here since this is a best effort and we try to be least disruptive + // this method is also called under lock in the engine under certain situations such that this can lead to deadlocks + // if we do use a blocking acquire. see #28714 + if (lock != null) { // did we get the lock? + // Must re-get it here, vs using entry.getValue(), in case the uid was indexed/deleted since we pulled the iterator: + final DeleteVersionValue versionValue = tombstones.get(uid); + if (versionValue != null) { + if (canRemoveTombstone(currentTime, pruneInterval, versionValue)) { removeTombstoneUnderLock(uid); } } From 053af9449970dca871b006fdff12d855d6326c32 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 12 Mar 2018 03:05:24 -0700 Subject: [PATCH 28/89] Disallow logger methods with Object parameter (#28969) Log4j2 provides a wide range of logging methods. Our code typically only uses a subset of them. In particular, uses of the methods trace|debug|info|warn|error|fatal(Object) or trace|debug|info|warn|error|fatal(Object, Throwable) have all been wrong, leading to not properly logging the provided message. To prevent these issues in the future, the corresponding Logger methods have been blacklisted. 
--- .../forbidden/es-server-signatures.txt | 14 ++++++++++++++ .../QueueResizingEsThreadPoolExecutor.java | 2 +- .../org/elasticsearch/index/IndexService.java | 8 ++++---- .../elasticsearch/indices/IndicesService.java | 18 +++++++++--------- .../org/elasticsearch/rest/RestController.java | 2 +- .../transport/TransportService.java | 15 +++++++-------- 6 files changed, 36 insertions(+), 23 deletions(-) diff --git a/buildSrc/src/main/resources/forbidden/es-server-signatures.txt b/buildSrc/src/main/resources/forbidden/es-server-signatures.txt index 9db17aaac0e93..d0757038c599e 100644 --- a/buildSrc/src/main/resources/forbidden/es-server-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-server-signatures.txt @@ -133,3 +133,17 @@ java.time.OffsetDateTime#withYear(int) java.time.zone.ZoneRules#getStandardOffset(java.time.Instant) java.time.zone.ZoneRules#getDaylightSavings(java.time.Instant) java.time.zone.ZoneRules#isDaylightSavings(java.time.Instant) + +@defaultMessage Use logger methods with non-Object parameter +org.apache.logging.log4j.Logger#trace(java.lang.Object) +org.apache.logging.log4j.Logger#trace(java.lang.Object, java.lang.Throwable) +org.apache.logging.log4j.Logger#debug(java.lang.Object) +org.apache.logging.log4j.Logger#debug(java.lang.Object, java.lang.Throwable) +org.apache.logging.log4j.Logger#info(java.lang.Object) +org.apache.logging.log4j.Logger#info(java.lang.Object, java.lang.Throwable) +org.apache.logging.log4j.Logger#warn(java.lang.Object) +org.apache.logging.log4j.Logger#warn(java.lang.Object, java.lang.Throwable) +org.apache.logging.log4j.Logger#error(java.lang.Object) +org.apache.logging.log4j.Logger#error(java.lang.Object, java.lang.Throwable) +org.apache.logging.log4j.Logger#fatal(java.lang.Object) +org.apache.logging.log4j.Logger#fatal(java.lang.Object, java.lang.Throwable) diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java 
b/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java index b62f93d833de0..7cf019bd6c58b 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java @@ -197,7 +197,7 @@ protected void afterExecute(Runnable r, Throwable t) { } } catch (ArithmeticException e) { // There was an integer overflow, so just log about it, rather than adjust the queue size - logger.warn((Supplier) () -> new ParameterizedMessage( + logger.warn(() -> new ParameterizedMessage( "failed to calculate optimal queue size for [{}] thread pool, " + "total frame time [{}ns], tasks [{}], task execution time [{}ns]", getName(), totalRuntime, tasksPerFrame, totalNanos), diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 2f508858b8206..df71471a3caae 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -447,7 +447,7 @@ private void closeShard(String reason, ShardId sId, IndexShard indexShard, Store } } catch (Exception e) { logger.warn( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "[{}] failed to close store on shard removal (reason: [{}])", shardId, reason), e); } } @@ -466,7 +466,7 @@ private void onShardClose(ShardLock lock) { } catch (IOException e) { shardStoreDeleter.addPendingDelete(lock.getShardId(), indexSettings); logger.debug( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "[{}] failed to delete shard content - scheduled a retry", lock.getShardId().id()), e); } } @@ -622,7 +622,7 @@ public synchronized void updateMetaData(final IndexMetaData metadata) { shard.onSettingsChanged(); } catch (Exception e) { logger.warn( - (Supplier) () -> 
new ParameterizedMessage( + () -> new ParameterizedMessage( "[{}] failed to notify shard about setting change", shard.shardId().id()), e); } } @@ -814,7 +814,7 @@ public final void run() { if (lastThrownException == null || sameException(lastThrownException, ex) == false) { // prevent the annoying fact of logging the same stuff all the time with an interval of 1 sec will spam all your logs indexService.logger.warn( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "failed to run task {} - suppressing re-occurring exceptions unless the exception changes", toString()), ex); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 7c4f54a782051..15ab826445747 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -311,7 +311,7 @@ Map> statsByShard(final IndicesService indicesServi } } catch (IllegalIndexShardStateException | AlreadyClosedException e) { // we can safely ignore illegal state on ones that are closing for example - logger.trace((Supplier) () -> new ParameterizedMessage("{} ignoring shard stats", indexShard.shardId()), e); + logger.trace(() -> new ParameterizedMessage("{} ignoring shard stats", indexShard.shardId()), e); } } } @@ -561,7 +561,7 @@ public void removeIndex(final Index index, final IndexRemovalReason reason, fina deleteIndexStore(extraInfo, indexService.index(), indexSettings); } } catch (Exception e) { - logger.warn((Supplier) () -> new ParameterizedMessage("failed to remove index {} ([{}][{}])", index, reason, extraInfo), e); + logger.warn(() -> new ParameterizedMessage("failed to remove index {} ([{}][{}])", index, reason, extraInfo), e); } } @@ -617,7 +617,7 @@ public void deleteUnassignedIndex(String reason, IndexMetaData metaData, Cluster } deleteIndexStore(reason, metaData, clusterState); } catch (Exception e) { 
- logger.warn((Supplier) () -> new ParameterizedMessage("[{}] failed to delete unassigned index (reason [{}])", metaData.getIndex(), reason), e); + logger.warn(() -> new ParameterizedMessage("[{}] failed to delete unassigned index (reason [{}])", metaData.getIndex(), reason), e); } } } @@ -669,9 +669,9 @@ private void deleteIndexStoreIfDeletionAllowed(final String reason, final Index } success = true; } catch (LockObtainFailedException ex) { - logger.debug((Supplier) () -> new ParameterizedMessage("{} failed to delete index store - at least one shards is still locked", index), ex); + logger.debug(() -> new ParameterizedMessage("{} failed to delete index store - at least one shards is still locked", index), ex); } catch (Exception ex) { - logger.warn((Supplier) () -> new ParameterizedMessage("{} failed to delete index", index), ex); + logger.warn(() -> new ParameterizedMessage("{} failed to delete index", index), ex); } finally { if (success == false) { addPendingDelete(index, indexSettings); @@ -774,7 +774,7 @@ public IndexMetaData verifyIndexIsDeleted(final Index index, final ClusterState try { metaData = metaStateService.loadIndexState(index); } catch (Exception e) { - logger.warn((Supplier) () -> new ParameterizedMessage("[{}] failed to load state file from a stale deleted index, folders will be left on disk", index), e); + logger.warn(() -> new ParameterizedMessage("[{}] failed to load state file from a stale deleted index, folders will be left on disk", index), e); return null; } final IndexSettings indexSettings = buildIndexSettings(metaData); @@ -783,7 +783,7 @@ public IndexMetaData verifyIndexIsDeleted(final Index index, final ClusterState } catch (Exception e) { // we just warn about the exception here because if deleteIndexStoreIfDeletionAllowed // throws an exception, it gets added to the list of pending deletes to be tried again - logger.warn((Supplier) () -> new ParameterizedMessage("[{}] failed to delete index on disk", metaData.getIndex()), e); + 
logger.warn(() -> new ParameterizedMessage("[{}] failed to delete index on disk", metaData.getIndex()), e); } return metaData; } @@ -960,7 +960,7 @@ public void processPendingDeletes(Index index, IndexSettings indexSettings, Time nodeEnv.deleteIndexDirectoryUnderLock(index, indexSettings); iterator.remove(); } catch (IOException ex) { - logger.debug((Supplier) () -> new ParameterizedMessage("{} retry pending delete", index), ex); + logger.debug(() -> new ParameterizedMessage("{} retry pending delete", index), ex); } } else { assert delete.shardId != -1; @@ -970,7 +970,7 @@ public void processPendingDeletes(Index index, IndexSettings indexSettings, Time deleteShardStore("pending delete", shardLock, delete.settings); iterator.remove(); } catch (IOException ex) { - logger.debug((Supplier) () -> new ParameterizedMessage("{} retry pending delete", shardLock.getShardId()), ex); + logger.debug(() -> new ParameterizedMessage("{} retry pending delete", shardLock.getShardId()), ex); } } else { logger.warn("{} no shard lock for pending delete", delete.shardId); diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 99f368a682fe0..d6aba28ce27eb 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -177,7 +177,7 @@ public void dispatchRequest(RestRequest request, RestChannel channel, ThreadCont channel.sendResponse(new BytesRestResponse(channel, e)); } catch (Exception inner) { inner.addSuppressed(e); - logger.error((Supplier) () -> + logger.error(() -> new ParameterizedMessage("failed to send failure response for uri [{}]", request.uri()), inner); } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 7687844231ccd..5af0ba5eedc72 100644 --- 
a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -249,7 +249,7 @@ protected void doStop() { public void onRejection(Exception e) { // if we get rejected during node shutdown we don't wanna bubble it up logger.debug( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "failed to notify response handler on rejection, action: {}", holderToNotify.action()), e); @@ -257,7 +257,7 @@ public void onRejection(Exception e) { @Override public void onFailure(Exception e) { logger.warn( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "failed to notify response handler on exception, action: {}", holderToNotify.action()), e); @@ -611,7 +611,7 @@ private void sendRequestInternal(final Transport.C public void onRejection(Exception e) { // if we get rejected during node shutdown we don't wanna bubble it up logger.debug( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "failed to notify response handler on rejection, action: {}", holderToNotify.action()), e); @@ -619,7 +619,7 @@ public void onRejection(Exception e) { @Override public void onFailure(Exception e) { logger.warn( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "failed to notify response handler on exception, action: {}", holderToNotify.action()), e); @@ -667,8 +667,7 @@ public void onFailure(Exception e) { channel.sendResponse(e); } catch (Exception inner) { inner.addSuppressed(e); - logger.warn( - (Supplier) () -> new ParameterizedMessage( + logger.warn(() -> new ParameterizedMessage( "failed to notify channel of error message for action [{}]", action), inner); } } @@ -681,7 +680,7 @@ public void onFailure(Exception e) { } catch (Exception inner) { inner.addSuppressed(e); logger.warn( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "failed to notify channel of error 
message for action [{}]", action), inner); } } @@ -1191,7 +1190,7 @@ protected void processException(final TransportResponseHandler handler, final Re handler.handleException(rtx); } catch (Exception e) { logger.error( - (Supplier) () -> new ParameterizedMessage( + () -> new ParameterizedMessage( "failed to handle exception for action [{}], handler [{}]", action, handler), e); } } From 8fe5cb046081b0921fede1b4ea2ec00ab94a501a Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 12 Mar 2018 08:42:02 -0400 Subject: [PATCH 29/89] Avoid class cast exception from index writer (#28989) When an index writer encounters a tragic exception, it could be a Throwable and not an Exception. Yet we blindly cast the tragic exception to an Exception which can encounter a ClassCastException. This commit addresses this by checking if the tragic exception is an Exception and otherwise wrapping the Throwable in a RuntimeException if it is not. We choose to wrap the Throwable instead of passing it around because passing it around leads to changing a lot of places where we handle Exception to handle Throwable instead. In general, we have tried to avoid handling Throwable and instead let those bubble up to the uncaught exception handler. --- .../org/elasticsearch/index/engine/InternalEngine.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index dffd45b62badd..dc9277a76e351 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -1788,7 +1788,13 @@ private boolean failOnTragicEvent(AlreadyClosedException ex) { // we need to fail the engine. 
it might have already been failed before // but we are double-checking it's failed and closed if (indexWriter.isOpen() == false && indexWriter.getTragicException() != null) { - failEngine("already closed by tragic event on the index writer", (Exception) indexWriter.getTragicException()); + final Exception tragicException; + if (indexWriter.getTragicException() instanceof Exception) { + tragicException = (Exception) indexWriter.getTragicException(); + } else { + tragicException = new RuntimeException(indexWriter.getTragicException()); + } + failEngine("already closed by tragic event on the index writer", tragicException); engineFailed = true; } else if (translog.isOpen() == false && translog.getTragicException() != null) { failEngine("already closed by tragic event on the translog", translog.getTragicException()); From 9350d8eae654ebdf48c8bfde6b7ee78dd5c371af Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Mon, 12 Mar 2018 08:07:33 +0100 Subject: [PATCH 30/89] Reduce heap-memory usage of ingest-geoip plugin (#28963) With this commit we reduce heap usage of the ingest-geoip plugin by memory-mapping the database files. Previously, we stored these files gzip-compressed, which resulted in the data being loaded onto the heap.
Closes #28782 --- docs/plugins/ingest-geoip.asciidoc | 8 ++-- plugins/ingest-geoip/build.gradle | 4 +- .../ingest/geoip/GeoIpProcessor.java | 6 +-- .../ingest/geoip/IngestGeoIpPlugin.java | 30 ++++++++----- .../geoip/GeoIpProcessorFactoryTests.java | 44 +++++++++---------- .../ingest/geoip/GeoIpProcessorTests.java | 30 ++++++------- .../test/ingest_geoip/20_geoip_processor.yml | 4 +- 7 files changed, 66 insertions(+), 60 deletions(-) diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index b06708331bc0d..32516d07bef37 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -9,8 +9,8 @@ The ingest-geoip plugin ships by default with the GeoLite2 City, GeoLite2 Countr under the CCA-ShareAlike 4.0 license. For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/ The GeoIP processor can run with other geoip2 databases from Maxmind. The files must be copied into the geoip config directory, -and the `database_file` option should be used to specify the filename of the custom database. Custom database files must be compressed -with gzip. The geoip config directory is located at `$ES_HOME/config/ingest-geoip` and holds the shipped databases too. +and the `database_file` option should be used to specify the filename of the custom database. Custom database files must be stored +uncompressed. The geoip config directory is located at `$ES_HOME/config/ingest-geoip` and holds the shipped databases too. :plugin_name: ingest-geoip include::install_remove.asciidoc[] @@ -25,7 +25,7 @@ include::install_remove.asciidoc[] | Name | Required | Default | Description | `field` | yes | - | The field to get the ip address from for the geographical lookup. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. -| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. 
The ingest-geoip plugin ships with the GeoLite2-City.mmdb.gz and GeoLite2-Country.mmdb.gz files. +| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb, GeoLite2-Country.mmdb and GeoLite2-ASN.mmdb files. | `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. | `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== @@ -101,7 +101,7 @@ PUT _ingest/pipeline/geoip "geoip" : { "field" : "ip", "target_field" : "geo", - "database_file" : "GeoLite2-Country.mmdb.gz" + "database_file" : "GeoLite2-Country.mmdb" } } ] diff --git a/plugins/ingest-geoip/build.gradle b/plugins/ingest-geoip/build.gradle index 15dda4f4c102c..54facc5aad23c 100644 --- a/plugins/ingest-geoip/build.gradle +++ b/plugins/ingest-geoip/build.gradle @@ -30,13 +30,13 @@ dependencies { compile("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") compile('com.maxmind.db:maxmind-db:1.2.2') - testCompile 'org.elasticsearch:geolite2-databases:20171206' + testCompile 'org.elasticsearch:geolite2-databases:20180303' } task copyDefaultGeoIp2DatabaseFiles(type: Copy) { from { zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases')}) } into "${project.buildDir}/ingest-geoip" - include "*.mmdb.gz" + include "*.mmdb" } project.bundlePlugin.dependsOn(copyDefaultGeoIp2DatabaseFiles) diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 0f192bd595501..f1b4b33017e3d 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ 
b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -37,7 +37,6 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import java.io.IOException; import java.net.InetAddress; import java.security.AccessController; import java.security.PrivilegedAction; @@ -68,8 +67,7 @@ public final class GeoIpProcessor extends AbstractProcessor { private final Set properties; private final boolean ignoreMissing; - GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set properties, - boolean ignoreMissing) throws IOException { + GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set properties, boolean ignoreMissing) { super(tag); this.field = field; this.targetField = targetField; @@ -323,7 +321,7 @@ public GeoIpProcessor create(Map registry, String pro Map config) throws Exception { String ipField = readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip"); - String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb.gz"); + String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb"); List propertyNames = readOptionalList(TYPE, processorTag, config, "properties"); boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 1571bc99ea4a4..10a65d0274228 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -21,8 +21,11 @@ import com.maxmind.db.NoCache; import 
com.maxmind.db.NodeCache; +import com.maxmind.db.Reader; import com.maxmind.geoip2.DatabaseReader; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugins.IngestPlugin; @@ -30,11 +33,9 @@ import java.io.Closeable; import java.io.IOException; -import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.PathMatcher; -import java.nio.file.StandardOpenOption; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -42,7 +43,6 @@ import java.util.List; import java.util.Map; import java.util.stream.Stream; -import java.util.zip.GZIPInputStream; public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable { public static final Setting CACHE_SIZE = @@ -80,21 +80,26 @@ static Map loadDatabaseReaders(Path geoIpConfi if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) { throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist"); } - + boolean loadDatabaseOnHeap = Booleans.parseBoolean(System.getProperty("es.geoip.load_db_on_heap", "false")); Map databaseReaders = new HashMap<>(); try (Stream databaseFiles = Files.list(geoIpConfigDirectory)) { - PathMatcher pathMatcher = geoIpConfigDirectory.getFileSystem().getPathMatcher("glob:**.mmdb.gz"); + PathMatcher pathMatcher = geoIpConfigDirectory.getFileSystem().getPathMatcher("glob:**.mmdb"); // Use iterator instead of forEach otherwise IOException needs to be caught twice... 
Iterator iterator = databaseFiles.iterator(); while (iterator.hasNext()) { Path databasePath = iterator.next(); if (Files.isRegularFile(databasePath) && pathMatcher.matches(databasePath)) { String databaseFileName = databasePath.getFileName().toString(); - DatabaseReaderLazyLoader holder = new DatabaseReaderLazyLoader(databaseFileName, () -> { - try (InputStream inputStream = new GZIPInputStream(Files.newInputStream(databasePath, StandardOpenOption.READ))) { - return new DatabaseReader.Builder(inputStream).withCache(cache).build(); - } - }); + DatabaseReaderLazyLoader holder = new DatabaseReaderLazyLoader(databaseFileName, + () -> { + DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(cache); + if (loadDatabaseOnHeap) { + builder.fileMode(Reader.FileMode.MEMORY); + } else { + builder.fileMode(Reader.FileMode.MEMORY_MAPPED); + } + return builder.build(); + }); databaseReaders.put(databaseFileName, holder); } } @@ -102,6 +107,11 @@ static Map loadDatabaseReaders(Path geoIpConfi return Collections.unmodifiableMap(databaseReaders); } + @SuppressForbidden(reason = "Maxmind API requires java.io.File") + private static DatabaseReader.Builder createDatabaseBuilder(Path databasePath) { + return new DatabaseReader.Builder(databasePath.toFile()); + } + @Override public void close() throws IOException { if (databaseReaders != null) { diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 58119cc1af983..0cc9e8a484747 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -54,12 +54,12 @@ public static void loadDatabaseReaders() throws IOException { Path configDir = createTempDir(); Path geoIpConfigDir = 
configDir.resolve("ingest-geoip"); Files.createDirectories(geoIpConfigDir); - Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb.gz")), - geoIpConfigDir.resolve("GeoLite2-City.mmdb.gz")); - Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb.gz")), - geoIpConfigDir.resolve("GeoLite2-Country.mmdb.gz")); - Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb.gz")), - geoIpConfigDir.resolve("GeoLite2-ASN.mmdb.gz")); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), + geoIpConfigDir.resolve("GeoLite2-City.mmdb")); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), + geoIpConfigDir.resolve("GeoLite2-Country.mmdb")); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb")), + geoIpConfigDir.resolve("GeoLite2-ASN.mmdb")); NodeCache cache = randomFrom(NoCache.getInstance(), new GeoIpCache(randomNonNegativeLong())); databaseReaders = IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir, cache); @@ -111,7 +111,7 @@ public void testCountryBuildDefaults() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-Country.mmdb.gz"); + config.put("database_file", "GeoLite2-Country.mmdb"); String processorTag = randomAlphaOfLength(10); GeoIpProcessor processor = factory.create(null, processorTag, config); @@ -129,7 +129,7 @@ public void testAsnBuildDefaults() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-ASN.mmdb.gz"); + config.put("database_file", "GeoLite2-ASN.mmdb"); String processorTag = randomAlphaOfLength(10); GeoIpProcessor processor = factory.create(null, processorTag, config); @@ -157,7 +157,7 @@ public void testBuildDbFile() throws Exception { GeoIpProcessor.Factory 
factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-Country.mmdb.gz"); + config.put("database_file", "GeoLite2-Country.mmdb"); GeoIpProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); @@ -170,7 +170,7 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-Country.mmdb.gz"); + config.put("database_file", "GeoLite2-Country.mmdb"); EnumSet asnOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_ASN_PROPERTIES); asnOnlyProperties.remove(GeoIpProcessor.Property.IP); String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString(); @@ -184,7 +184,7 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-ASN.mmdb.gz"); + config.put("database_file", "GeoLite2-ASN.mmdb"); EnumSet cityOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_CITY_PROPERTIES); cityOnlyProperties.remove(GeoIpProcessor.Property.IP); String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); @@ -199,9 +199,9 @@ public void testBuildNonExistingDbFile() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "does-not-exist.mmdb.gz"); + config.put("database_file", "does-not-exist.mmdb"); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); - assertThat(e.getMessage(), equalTo("[database_file] database file 
[does-not-exist.mmdb.gz] doesn't exist")); + assertThat(e.getMessage(), equalTo("[database_file] database file [does-not-exist.mmdb] doesn't exist")); } public void testBuildFields() throws Exception { @@ -249,12 +249,12 @@ public void testLazyLoading() throws Exception { Path configDir = createTempDir(); Path geoIpConfigDir = configDir.resolve("ingest-geoip"); Files.createDirectories(geoIpConfigDir); - Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb.gz")), - geoIpConfigDir.resolve("GeoLite2-City.mmdb.gz")); - Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb.gz")), - geoIpConfigDir.resolve("GeoLite2-Country.mmdb.gz")); - Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb.gz")), - geoIpConfigDir.resolve("GeoLite2-ASN.mmdb.gz")); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), + geoIpConfigDir.resolve("GeoLite2-City.mmdb")); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), + geoIpConfigDir.resolve("GeoLite2-Country.mmdb")); + Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb")), + geoIpConfigDir.resolve("GeoLite2-ASN.mmdb")); // Loading another database reader instances, because otherwise we can't test lazy loading as the // database readers used at class level are reused between tests. 
(we want to keep that otherwise running this @@ -268,15 +268,15 @@ public void testLazyLoading() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-City.mmdb.gz"); + config.put("database_file", "GeoLite2-City.mmdb"); factory.create(null, "_tag", config); config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-Country.mmdb.gz"); + config.put("database_file", "GeoLite2-Country.mmdb"); factory.create(null, "_tag", config); config = new HashMap<>(); config.put("field", "_field"); - config.put("database_file", "GeoLite2-ASN.mmdb.gz"); + config.put("database_file", "GeoLite2-ASN.mmdb"); factory.create(null, "_tag", config); for (DatabaseReaderLazyLoader lazyLoader : databaseReaders.values()) { diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index e58cf21f25d0c..48a1769cbf82f 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -24,13 +24,11 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.Map; -import java.util.zip.GZIPInputStream; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.containsString; @@ -40,7 +38,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCity() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new 
GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -64,7 +62,7 @@ public void testCity() throws Exception { } public void testNullValueWithIgnoreMissing() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), @@ -75,7 +73,7 @@ public void testNullValueWithIgnoreMissing() throws Exception { } public void testNonExistentWithIgnoreMissing() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); @@ -85,7 +83,7 @@ public void testNonExistentWithIgnoreMissing() throws Exception { } public void testNullWithoutIgnoreMissing() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), @@ -96,7 +94,7 @@ public void 
testNullWithoutIgnoreMissing() throws Exception { } public void testNonExistentWithoutIgnoreMissing() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); @@ -106,7 +104,7 @@ public void testNonExistentWithoutIgnoreMissing() throws Exception { } public void testCity_withIpV6() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -134,7 +132,7 @@ public void testCity_withIpV6() throws Exception { } public void testCityWithMissingLocation() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -151,7 +149,7 @@ public void testCityWithMissingLocation() throws Exception { } public void testCountry() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new 
DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -171,7 +169,7 @@ public void testCountry() throws Exception { } public void testCountryWithMissingLocation() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -189,7 +187,7 @@ public void testCountryWithMissingLocation() throws Exception { public void testAsn() throws Exception { String ip = "82.170.213.79"; - InputStream database = getDatabaseFileInputStream("/GeoLite2-ASN.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-ASN.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -208,7 +206,7 @@ public void testAsn() throws Exception { } public void testAddressIsNotInTheDatabase() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -221,7 +219,7 @@ public void testAddressIsNotInTheDatabase() throws Exception { /** Don't silently do DNS lookups or anything trappy on bogus data */ public void testInvalid() throws Exception { - InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); + InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new 
GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); @@ -232,8 +230,8 @@ public void testInvalid() throws Exception { assertThat(e.getMessage(), containsString("not an IP string literal")); } - private static InputStream getDatabaseFileInputStream(String path) throws IOException { - return new GZIPInputStream(GeoIpProcessor.class.getResourceAsStream(path)); + private static InputStream getDatabaseFileInputStream(String path) { + return GeoIpProcessor.class.getResourceAsStream(path); } } diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml index 3c9661cc5853b..0c400c3c0eabe 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml @@ -98,7 +98,7 @@ { "geoip" : { "field" : "field1", - "database_file" : "GeoLite2-Country.mmdb.gz" + "database_file" : "GeoLite2-Country.mmdb" } } ] @@ -208,7 +208,7 @@ { "geoip" : { "field" : "field1", - "database_file" : "GeoLite2-ASN.mmdb.gz" + "database_file" : "GeoLite2-ASN.mmdb" } } ] From ada047dec52dbdeb6c420c9cb782c9228633ab55 Mon Sep 17 00:00:00 2001 From: Shane O'Grady Date: Mon, 12 Mar 2018 11:37:11 -0300 Subject: [PATCH 31/89] [discovery-gce] Align code examples and documentation (#28876) The docs state that `_gce_` is recommended but the code sample states that `_gce:hostname_` is recommended. This aligns the code sample with the documentation. Also replace `type` with `zen.hosts_provider` as discovery.type was removed in #25080. 
--- docs/plugins/discovery-gce.asciidoc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/plugins/discovery-gce.asciidoc b/docs/plugins/discovery-gce.asciidoc index c194434414e22..33369eaba3c10 100644 --- a/docs/plugins/discovery-gce.asciidoc +++ b/docs/plugins/discovery-gce.asciidoc @@ -83,10 +83,10 @@ Examples: -------------------------------------------------- # get the IP address from network interface 1 network.host: _gce:privateIp:1_ -# shortcut for _gce:privateIp:0_ -network.host: _gce_ -# Using GCE internal hostname (recommended) +# Using GCE internal hostname network.host: _gce:hostname_ +# shortcut for _gce:privateIp:0_ (recommended) +network.host: _gce_ -------------------------------------------------- [[discovery-gce-usage-short]] @@ -227,7 +227,7 @@ cloud: project_id: es-cloud zone: europe-west1-a discovery: - type: gce + zen.hosts_provider: gce -------------------------------------------------- @@ -341,7 +341,7 @@ cloud: project_id: zone: ["", ""] discovery: - type: gce + zen.hosts_provider: gce -------------------------------------------------- @@ -377,7 +377,7 @@ cloud: project_id: es-cloud zone: europe-west1-a discovery: - type: gce + zen.hosts_provider: gce gce: tags: elasticsearch, dev -------------------------------------------------- @@ -492,7 +492,7 @@ cloud: project_id: es-cloud zone: europe-west1-a discovery: - type: gce + zen.hosts_provider: gce -------------------------------------------------- Replaces `project_id` and `zone` with your settings. From 053e23a43a91d3b6e5e5e7b636aa54eea59b1160 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mika=E2=A0=99?= Date: Mon, 12 Mar 2018 16:48:51 +0100 Subject: [PATCH 32/89] Add check when trying to reroute a shard to a non-data discovery node (#28886) While trying to reroute a shard to or from a non-data node (a node with ``node.data=false``), I encountered a null pointer exception. 
Though an exception is to be expected, the NPE was occurring because ``allocation.routingNodes()`` would not contain any non-data nodes, so when you attempt to do ``allocation.routingNodes.node(non-data-node)`` it would not find it, and thus error. This occurred regardless of whether I was rerouting to or from a non-data node. This PR adds a check (as well as a test for these use cases) to return a legible, useful exception if the discovery node you are rerouting to or from is not a data node. --- .../command/MoveAllocationCommand.java | 16 +++- .../allocation/AllocationCommandsTests.java | 79 +++++++++++++++++++ 2 files changed, 93 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java index 6e302d458ba8c..6b4af8c605aae 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java @@ -102,7 +102,20 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) Decision decision = null; boolean found = false; - for (ShardRouting shardRouting : allocation.routingNodes().node(fromDiscoNode.getId())) { + RoutingNode fromRoutingNode = allocation.routingNodes().node(fromDiscoNode.getId()); + if (fromRoutingNode == null && !fromDiscoNode.isDataNode()) { + throw new IllegalArgumentException("[move_allocation] can't move [" + index + "][" + shardId + "] from " + + fromDiscoNode + " to " + toDiscoNode + ": source [" + fromDiscoNode.getName() + + "] is not a data node."); + } + RoutingNode toRoutingNode = allocation.routingNodes().node(toDiscoNode.getId()); + if (toRoutingNode == null && !toDiscoNode.isDataNode()) { + throw new IllegalArgumentException("[move_allocation] can't move [" + index + "][" + shardId 
+ "] from " + + fromDiscoNode + " to " + toDiscoNode + ": source [" + toDiscoNode.getName() + + "] is not a data node."); + } + + for (ShardRouting shardRouting : fromRoutingNode) { if (!shardRouting.shardId().getIndexName().equals(index)) { continue; } @@ -121,7 +134,6 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) ", shard is not started (state = " + shardRouting.state() + "]"); } - RoutingNode toRoutingNode = allocation.routingNodes().node(toDiscoNode.getId()); decision = allocation.deciders().canAllocate(shardRouting, toRoutingNode, allocation); if (decision.type() == Decision.Type.NO) { if (explain) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 8e9cddcbea4ff..659813f62d46f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -21,12 +21,15 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterInfo; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand; @@ -36,6 +39,7 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import 
org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -47,12 +51,16 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; import java.util.Collections; +import java.util.EnumSet; +import java.util.HashSet; +import static java.util.Collections.emptyMap; import static java.util.Collections.singleton; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; @@ -520,4 +528,75 @@ public void testXContent() throws Exception { protected NamedXContentRegistry xContentRegistry() { return new NamedXContentRegistry(NetworkModule.getNamedXContents()); } + + public void testMoveShardToNonDataNode() { + AllocationService allocation = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); + + logger.info("creating an index with 1 shard, no replica"); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) + .build(); + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + ClusterState clusterState = 
ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build(); + + logger.info("--> adding two nodes"); + + DiscoveryNode node1 = new DiscoveryNode("node1", "node1", "node1", "test1", "test1", buildNewFakeTransportAddress(), emptyMap(), + MASTER_DATA_ROLES, Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("node2", "node2", "node2", "test2", "test2", buildNewFakeTransportAddress(), emptyMap(), + new HashSet<>(randomSubsetOf(EnumSet.of(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.INGEST))), Version.CURRENT); + + clusterState = ClusterState.builder(clusterState).nodes( + DiscoveryNodes.builder() + .add(node1) + .add(node2)).build(); + + logger.info("start primary shard"); + clusterState = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); + + Index index = clusterState.getMetaData().index("test").getIndex(); + MoveAllocationCommand command = new MoveAllocationCommand(index.getName(), 0, "node1", "node2"); + RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), + new RoutingNodes(clusterState, false), clusterState, ClusterInfo.EMPTY, System.nanoTime()); + logger.info("--> executing move allocation command to non-data node"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> command.execute(routingAllocation, false)); + assertEquals("[move_allocation] can't move [test][0] from " + node1 + " to " + node2 + ": source [" + node2.getName() + "] is not a data node.", e.getMessage()); + } + + public void testMoveShardFromNonDataNode() { + AllocationService allocation = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); + + logger.info("creating an index with 1 shard, no replica"); + MetaData metaData = MetaData.builder() + 
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) + .build(); + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build(); + + logger.info("--> adding two nodes"); + + DiscoveryNode node1 = new DiscoveryNode("node1", "node1", "node1", "test1", "test1", buildNewFakeTransportAddress(), emptyMap(), + MASTER_DATA_ROLES, Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("node2", "node2", "node2", "test2", "test2", buildNewFakeTransportAddress(), emptyMap(), + new HashSet<>(randomSubsetOf(EnumSet.of(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.INGEST))), Version.CURRENT); + + clusterState = ClusterState.builder(clusterState).nodes( + DiscoveryNodes.builder() + .add(node1) + .add(node2)).build(); + logger.info("start primary shard"); + clusterState = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); + + Index index = clusterState.getMetaData().index("test").getIndex(); + MoveAllocationCommand command = new MoveAllocationCommand(index.getName(), 0, "node2", "node1"); + RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), + new RoutingNodes(clusterState, false), clusterState, ClusterInfo.EMPTY, System.nanoTime()); + logger.info("--> executing move allocation command from non-data node"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> command.execute(routingAllocation, false)); + assertEquals("[move_allocation] can't move [test][0] from " + node2 + " to " + node1 + ": source [" + node2.getName() + "] is not a data node.", e.getMessage()); + } } From b3218cbe43cc3b5dadaa1328ee20412a6d3b97de Mon Sep 17 00:00:00 2001 
From: Jason Tedor Date: Mon, 12 Mar 2018 12:48:00 -0400 Subject: [PATCH 33/89] Create keystore on package install (#28928) This commit removes the ability to specify that a plugin requires the keystore and instead creates the keystore on package installation or when Elasticsearch is started for the first time. The reason that we opt to create the keystore on package installation is to ensure that the keystore has the correct permissions (the package installation scripts run as root as opposed to Elasticsearch running as the elasticsearch user) and to enable removing the keystore on package removal if the keystore is not modified. --- .../resources/plugin-descriptor.properties | 3 -- .../packages/src/common/scripts/postinst | 11 +++- .../packages/src/common/scripts/prerm | 11 ++++ .../plugins/InstallPluginCommand.java | 18 ------- .../plugins/InstallPluginCommandTests.java | 39 -------------- .../plugins/ListPluginsCommandTests.java | 53 ++++--------------- .../packaging/tests/20_tar_package.bats | 15 ++++++ .../packaging/tests/30_deb_package.bats | 5 ++ .../packaging/tests/40_rpm_package.bats | 2 + .../resources/packaging/utils/packages.bash | 5 ++ .../test/resources/packaging/utils/tar.bash | 1 + .../elasticsearch/bootstrap/Bootstrap.java | 13 +++-- .../org/elasticsearch/plugins/PluginInfo.java | 53 ++++++------------- .../elasticsearch/plugins/PluginsService.java | 2 +- .../nodesinfo/NodeInfoStreamingTests.java | 4 +- .../plugins/PluginInfoTests.java | 12 ++--- .../plugins/PluginsServiceTests.java | 48 ++++++++--------- 17 files changed, 116 insertions(+), 179 deletions(-) diff --git a/buildSrc/src/main/resources/plugin-descriptor.properties b/buildSrc/src/main/resources/plugin-descriptor.properties index 47550d2a4ffb0..03013e8fed849 100644 --- a/buildSrc/src/main/resources/plugin-descriptor.properties +++ b/buildSrc/src/main/resources/plugin-descriptor.properties @@ -43,6 +43,3 @@ extended.plugins=${extendedPlugins} # # 'has.native.controller': whether or not 
the plugin has a native controller has.native.controller=${hasNativeController} -# -# 'requires.keystore': whether or not the plugin needs the elasticsearch keystore be created -requires.keystore=${requiresKeystore} diff --git a/distribution/packages/src/common/scripts/postinst b/distribution/packages/src/common/scripts/postinst index a7ffda92d1e9d..45ff73fbe58f1 100644 --- a/distribution/packages/src/common/scripts/postinst +++ b/distribution/packages/src/common/scripts/postinst @@ -104,11 +104,18 @@ chmod g+s /etc/elasticsearch chmod 0750 /etc/elasticsearch if [ -f /etc/default/elasticsearch ]; then - chown root:elasticsearch /etc/default/elasticsearch + chown root:elasticsearch /etc/default/elasticsearch fi if [ -f /etc/sysconfig/elasticsearch ]; then - chown root:elasticsearch /etc/sysconfig/elasticsearch + chown root:elasticsearch /etc/sysconfig/elasticsearch +fi + +if [ ! -f "$ES_PATH_CONF"/elasticsearch.keystore ]; then + /usr/share/elasticsearch/bin/elasticsearch-keystore create + chown root:elasticsearch "$ES_PATH_CONF"/elasticsearch.keystore + chmod 660 "$ES_PATH_CONF"/elasticsearch.keystore + md5sum "$ES_PATH_CONF"/elasticsearch.keystore > "$ES_PATH_CONF"/.elasticsearch.keystore.initial_md5sum fi ${scripts.footer} diff --git a/distribution/packages/src/common/scripts/prerm b/distribution/packages/src/common/scripts/prerm index 632e59a26eab3..e2e9faa9cde21 100644 --- a/distribution/packages/src/common/scripts/prerm +++ b/distribution/packages/src/common/scripts/prerm @@ -44,6 +44,11 @@ case "$1" in ;; esac +ES_ENV_FILE="${path.env}" +if [ -f "$ES_ENV_FILE" ]; then + . "$ES_ENV_FILE" +fi + # Stops the service if [ "$STOP_REQUIRED" = "true" ]; then echo -n "Stopping elasticsearch service..." 
@@ -67,6 +72,12 @@ if [ "$STOP_REQUIRED" = "true" ]; then echo " OK" fi +if [ -f "$ES_PATH_CONF"/elasticsearch.keystore ]; then + if md5sum --status -c "$ES_PATH_CONF"/.elasticsearch.keystore.initial_md5sum; then + rm "$ES_PATH_CONF"/elasticsearch.keystore "$ES_PATH_CONF"/.elasticsearch.keystore.initial_md5sum + fi +fi + if [ "$REMOVE_SERVICE" = "true" ]; then if command -v systemctl >/dev/null; then systemctl disable elasticsearch.service > /dev/null 2>&1 || true diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 155b3a5647ae7..4648f18ffb812 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -686,12 +686,6 @@ private void installMetaPlugin(Terminal terminal, boolean isBatch, Path tmpRoot, } } movePlugin(tmpRoot, destination); - for (PluginInfo info : pluginInfos) { - if (info.requiresKeystore()) { - createKeystoreIfNeeded(terminal, env, info); - break; - } - } String[] plugins = pluginInfos.stream().map(PluginInfo::getName).toArray(String[]::new); terminal.println("-> Installed " + metaInfo.getName() + " with: " + Strings.arrayToCommaDelimitedString(plugins)); } @@ -716,9 +710,6 @@ private void installPlugin(Terminal terminal, boolean isBatch, Path tmpRoot, installPluginSupportFiles(info, tmpRoot, env.binFile().resolve(info.getName()), env.configFile().resolve(info.getName()), deleteOnFailure); movePlugin(tmpRoot, destination); - if (info.requiresKeystore()) { - createKeystoreIfNeeded(terminal, env, info); - } terminal.println("-> Installed " + info.getName()); } @@ -824,15 +815,6 @@ private void installConfig(PluginInfo info, Path tmpConfigDir, Path destConfigDi IOUtils.rm(tmpConfigDir); // clean up what we just copied } - private void 
createKeystoreIfNeeded(Terminal terminal, Environment env, PluginInfo info) throws Exception { - KeyStoreWrapper keystore = KeyStoreWrapper.load(env.configFile()); - if (keystore == null) { - terminal.println("Elasticsearch keystore is required by plugin [" + info.getName() + "], creating..."); - keystore = KeyStoreWrapper.create(); - keystore.save(env.configFile(), new char[0]); - } - } - private static void setOwnerGroup(final Path path, final PosixFileAttributes attributes) throws IOException { Objects.requireNonNull(attributes); PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(path, PosixFileAttributeView.class); diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 4e0cecae12f31..d799cb0407f58 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -1140,45 +1140,6 @@ public void testSha1Mismatch() throws Exception { assertTrue(e.getMessage(), e.getMessage().contains("SHA-1 mismatch, expected foobar")); } - public void testKeystoreNotRequired() throws Exception { - Tuple env = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - String pluginZip = createPluginUrl("fake", pluginDir, "requires.keystore", "false"); - installPlugin(pluginZip, env.v1()); - assertFalse(Files.exists(KeyStoreWrapper.keystorePath(env.v2().configFile()))); - } - - public void testKeystoreRequiredAlreadyExists() throws Exception { - Tuple env = createEnv(fs, temp); - KeyStoreWrapper keystore = KeyStoreWrapper.create(); - keystore.save(env.v2().configFile(), new char[0]); - byte[] expectedBytes = Files.readAllBytes(KeyStoreWrapper.keystorePath(env.v2().configFile())); - Path pluginDir = createPluginDir(temp); - String 
pluginZip = createPluginUrl("fake", pluginDir, "requires.keystore", "true"); - installPlugin(pluginZip, env.v1()); - byte[] gotBytes = Files.readAllBytes(KeyStoreWrapper.keystorePath(env.v2().configFile())); - assertArrayEquals("Keystore was modified", expectedBytes, gotBytes); - } - - public void testKeystoreRequiredCreated() throws Exception { - Tuple env = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - String pluginZip = createPluginUrl("fake", pluginDir, "requires.keystore", "true"); - installPlugin(pluginZip, env.v1()); - assertTrue(Files.exists(KeyStoreWrapper.keystorePath(env.v2().configFile()))); - } - - public void testKeystoreRequiredCreatedWithMetaPlugin() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); - writePlugin("fake", pluginDir, "requires.keystore", "true"); - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - installPlugin(metaZip, env.v1()); - assertTrue(Files.exists(KeyStoreWrapper.keystorePath(env.v2().configFile()))); - } - private Function checksum(final MessageDigest digest) { return checksumAndString(digest, ""); } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index 1eb3f482c4195..42d0df75e9dc2 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -25,7 +25,6 @@ import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.Arrays; -import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; @@ -94,7 +93,7 @@ private static void buildFakePlugin( final String description, final String name, final String classname) 
throws IOException { - buildFakePlugin(env, null, description, name, classname, false, false); + buildFakePlugin(env, null, description, name, classname, false); } private static void buildFakePlugin( @@ -103,17 +102,16 @@ private static void buildFakePlugin( final String description, final String name, final String classname) throws IOException { - buildFakePlugin(env, metaPlugin, description, name, classname, false, false); + buildFakePlugin(env, metaPlugin, description, name, classname, false); } private static void buildFakePlugin( - final Environment env, - final String description, - final String name, - final String classname, - final boolean hasNativeController, - final boolean requiresKeystore) throws IOException { - buildFakePlugin(env, null, description, name, classname, hasNativeController, requiresKeystore); + final Environment env, + final String description, + final String name, + final String classname, + final boolean hasNativeController) throws IOException { + buildFakePlugin(env, null, description, name, classname, hasNativeController); } private static void buildFakePlugin( @@ -122,8 +120,7 @@ private static void buildFakePlugin( final String description, final String name, final String classname, - final boolean hasNativeController, - final boolean requiresKeystore) throws IOException { + final boolean hasNativeController) throws IOException { Path dest = metaPlugin != null ? 
env.pluginsFile().resolve(metaPlugin) : env.pluginsFile(); PluginTestUtil.writePluginProperties( dest.resolve(name), @@ -133,8 +130,7 @@ private static void buildFakePlugin( "elasticsearch.version", Version.CURRENT.toString(), "java.version", "1.8", "classname", classname, - "has.native.controller", Boolean.toString(hasNativeController), - "requires.keystore", Boolean.toString(requiresKeystore)); + "has.native.controller", Boolean.toString(hasNativeController)); } private static void buildFakeMetaPlugin( @@ -196,14 +192,13 @@ public void testPluginWithVerbose() throws Exception { "Elasticsearch Version: " + Version.CURRENT.toString(), "Java Version: 1.8", "Native Controller: false", - "Requires Keystore: false", "Extended Plugins: []", " * Classname: org.fake"), terminal.getOutput()); } public void testPluginWithNativeController() throws Exception { - buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake", true, false); + buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake", true); String[] params = { "-v" }; MockTerminal terminal = listPlugins(home, params); assertEquals( @@ -217,28 +212,6 @@ public void testPluginWithNativeController() throws Exception { "Elasticsearch Version: " + Version.CURRENT.toString(), "Java Version: 1.8", "Native Controller: true", - "Requires Keystore: false", - "Extended Plugins: []", - " * Classname: org.fake"), - terminal.getOutput()); - } - - public void testPluginWithRequiresKeystore() throws Exception { - buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake", false, true); - String[] params = { "-v" }; - MockTerminal terminal = listPlugins(home, params); - assertEquals( - buildMultiline( - "Plugins directory: " + env.pluginsFile(), - "fake_plugin1", - "- Plugin information:", - "Name: fake_plugin1", - "Description: fake desc 1", - "Version: 1.0", - "Elasticsearch Version: " + Version.CURRENT.toString(), - "Java Version: 1.8", - "Native Controller: false", - "Requires Keystore: true", "Extended Plugins: 
[]", " * Classname: org.fake"), terminal.getOutput()); @@ -260,7 +233,6 @@ public void testPluginWithVerboseMultiplePlugins() throws Exception { "Elasticsearch Version: " + Version.CURRENT.toString(), "Java Version: 1.8", "Native Controller: false", - "Requires Keystore: false", "Extended Plugins: []", " * Classname: org.fake", "fake_plugin2", @@ -271,7 +243,6 @@ public void testPluginWithVerboseMultiplePlugins() throws Exception { "Elasticsearch Version: " + Version.CURRENT.toString(), "Java Version: 1.8", "Native Controller: false", - "Requires Keystore: false", "Extended Plugins: []", " * Classname: org.fake2"), terminal.getOutput()); @@ -295,7 +266,6 @@ public void testPluginWithVerboseMetaPlugins() throws Exception { "\tElasticsearch Version: " + Version.CURRENT.toString(), "\tJava Version: 1.8", "\tNative Controller: false", - "\tRequires Keystore: false", "\tExtended Plugins: []", "\t * Classname: org.fake", "\tfake_plugin2", @@ -306,7 +276,6 @@ public void testPluginWithVerboseMetaPlugins() throws Exception { "\tElasticsearch Version: " + Version.CURRENT.toString(), "\tJava Version: 1.8", "\tNative Controller: false", - "\tRequires Keystore: false", "\tExtended Plugins: []", "\t * Classname: org.fake2"), terminal.getOutput()); diff --git a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats index b456e1339de3c..3536c2a207ddd 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats @@ -100,6 +100,14 @@ setup() { } } +@test "[TAR] test creating elasticearch.keystore" { + sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" create + assert_file "$ESCONFIG/elasticsearch.keystore" f elasticsearch elasticsearch 660 + sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" list | grep "keystore.seed" + # cleanup for the next test + rm -rf "$ESCONFIG/elasticsearch.keystore" +} 
+ ################################## # Check that Elasticsearch is working ################################## @@ -109,6 +117,13 @@ setup() { stop_elasticsearch_service } +@test "[TAR] test auto-creating elasticearch.keystore" { + # a keystore should automatically be created after the service is started + assert_file "$ESCONFIG/elasticsearch.keystore" f elasticsearch elasticsearch 660 + # the keystore should be seeded + sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" list | grep "keystore.seed" +} + @test "[TAR] start Elasticsearch with custom JVM options" { local es_java_opts=$ES_JAVA_OPTS local es_path_conf=$ES_PATH_CONF diff --git a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats index 97d04c9405670..0b06e74555394 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats @@ -149,6 +149,9 @@ setup() { # The configuration files are still here assert_file_exist "/etc/elasticsearch" + # TODO: use ucf to handle these better for Debian-based systems + assert_file_not_exist "/etc/elasticsearch/elasticsearch.keystore" + assert_file_not_exist "/etc/elasticsearch/.elasticsearch.keystore.initial_md5sum" assert_file_exist "/etc/elasticsearch/elasticsearch.yml" assert_file_exist "/etc/elasticsearch/jvm.options" assert_file_exist "/etc/elasticsearch/log4j2.properties" @@ -170,6 +173,8 @@ setup() { @test "[DEB] verify package purge" { # all remaining files are deleted by the purge assert_file_not_exist "/etc/elasticsearch" + assert_file_not_exist "/etc/elasticsearch/elasticsearch.keystore" + assert_file_not_exist "/etc/elasticsearch/.elasticsearch.keystore.initial_md5sum" assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" assert_file_not_exist "/etc/elasticsearch/jvm.options" assert_file_not_exist "/etc/elasticsearch/log4j2.properties" diff --git 
a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats index 7df1593b62e80..e1b171a8e4b69 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats @@ -155,6 +155,7 @@ setup() { } @test "[RPM] reremove package" { + echo foobar | "$ESHOME/bin/elasticsearch-keystore" add --stdin foo.bar echo "# ping" >> "/etc/elasticsearch/elasticsearch.yml" echo "# ping" >> "/etc/elasticsearch/jvm.options" echo "# ping" >> "/etc/elasticsearch/log4j2.properties" @@ -181,6 +182,7 @@ setup() { assert_file_not_exist "/usr/share/elasticsearch/modules" assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" + assert_file_exist "/etc/elasticsearch/elasticsearch.keystore" assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" assert_file_exist "/etc/elasticsearch/elasticsearch.yml.rpmsave" assert_file_not_exist "/etc/elasticsearch/jvm.options" diff --git a/qa/vagrant/src/test/resources/packaging/utils/packages.bash b/qa/vagrant/src/test/resources/packaging/utils/packages.bash index 86e182dbbff87..01ad9258cdbed 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/packages.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/packages.bash @@ -95,6 +95,11 @@ verify_package_installation() { assert_file "$ESHOME/bin/elasticsearch-translog" f root root 755 assert_file "$ESHOME/lib" d root root 755 assert_file "$ESCONFIG" d root elasticsearch 2750 + assert_file "$ESCONFIG/elasticsearch.keystore" f root elasticsearch 660 + + sudo -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" list | grep "keystore.seed" + + assert_file "$ESCONFIG/.elasticsearch.keystore.initial_md5sum" f root elasticsearch 644 assert_file "$ESCONFIG/elasticsearch.yml" f root elasticsearch 660 assert_file "$ESCONFIG/jvm.options" f root elasticsearch 660 assert_file "$ESCONFIG/log4j2.properties" f root elasticsearch 660 diff 
--git a/qa/vagrant/src/test/resources/packaging/utils/tar.bash b/qa/vagrant/src/test/resources/packaging/utils/tar.bash index d96d360a02bc3..9b4bc76d841c9 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/tar.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/tar.bash @@ -101,4 +101,5 @@ verify_archive_installation() { assert_file "$ESHOME/NOTICE.txt" f elasticsearch elasticsearch 644 assert_file "$ESHOME/LICENSE.txt" f elasticsearch elasticsearch 644 assert_file "$ESHOME/README.textile" f elasticsearch elasticsearch 644 + assert_file_not_exist "$ESCONFIG/elasticsearch.keystore" } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index dd80e64e50916..c035d4a737fb8 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -227,13 +227,16 @@ static SecureSettings loadSecureSettings(Environment initialEnv) throws Bootstra } catch (IOException e) { throw new BootstrapException(e); } - if (keystore == null) { - return null; // no keystore - } try { - keystore.decrypt(new char[0] /* TODO: read password from stdin */); - KeyStoreWrapper.upgrade(keystore, initialEnv.configFile(), new char[0]); + if (keystore == null) { + final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); + keyStoreWrapper.save(initialEnv.configFile(), new char[0]); + return keyStoreWrapper; + } else { + keystore.decrypt(new char[0] /* TODO: read password from stdin */); + KeyStoreWrapper.upgrade(keystore, initialEnv.configFile(), new char[0]); + } } catch (Exception e) { throw new BootstrapException(e); } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java index 9f33b0d975fd1..fc54352704761 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ 
b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -23,7 +23,6 @@ import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -32,18 +31,14 @@ import java.io.IOException; import java.io.InputStream; -import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; -import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -63,7 +58,6 @@ public class PluginInfo implements Writeable, ToXContentObject { private final String classname; private final List extendedPlugins; private final boolean hasNativeController; - private final boolean requiresKeystore; /** * Construct plugin info. 
@@ -76,10 +70,9 @@ public class PluginInfo implements Writeable, ToXContentObject { * @param classname the entry point to the plugin * @param extendedPlugins other plugins this plugin extends through SPI * @param hasNativeController whether or not the plugin has a native controller - * @param requiresKeystore whether or not the plugin requires the elasticsearch keystore to be created */ public PluginInfo(String name, String description, String version, Version elasticsearchVersion, String javaVersion, - String classname, List extendedPlugins, boolean hasNativeController, boolean requiresKeystore) { + String classname, List extendedPlugins, boolean hasNativeController) { this.name = name; this.description = description; this.version = version; @@ -88,7 +81,6 @@ public PluginInfo(String name, String description, String version, Version elast this.classname = classname; this.extendedPlugins = Collections.unmodifiableList(extendedPlugins); this.hasNativeController = hasNativeController; - this.requiresKeystore = requiresKeystore; } /** @@ -121,10 +113,12 @@ public PluginInfo(final StreamInput in) throws IOException { } else { hasNativeController = false; } - if (in.getVersion().onOrAfter(Version.V_6_0_0_beta2)) { - requiresKeystore = in.readBoolean(); - } else { - requiresKeystore = false; + if (in.getVersion().onOrAfter(Version.V_6_0_0_beta2) && in.getVersion().before(Version.V_7_0_0_alpha1)) { + /* + * Elasticsearch versions in [6.0.0-beta2, 7.0.0) allowed plugins to specify that they require the keystore and this was + * serialized into the plugin info. Therefore, we have to read and ignore this value from the stream. 
+ */ + in.readBoolean(); } } @@ -144,8 +138,12 @@ public void writeTo(final StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_5_4_0)) { out.writeBoolean(hasNativeController); } - if (out.getVersion().onOrAfter(Version.V_6_0_0_beta2)) { - out.writeBoolean(requiresKeystore); + if (out.getVersion().onOrAfter(Version.V_6_0_0_beta2) && out.getVersion().before(Version.V_7_0_0_alpha1)) { + /* + * Elasticsearch versions in [6.0.0-beta2, 7.0.0) allowed plugins to specify that they require the keystore and this was + * serialized into the plugin info. Therefore, we have to write out a value for this boolean. + */ + out.writeBoolean(false); } } @@ -234,16 +232,8 @@ public static PluginInfo readFromProperties(final Path path) throws IOException } } - String requiresKeystoreValue = propsMap.remove("requires.keystore"); - if (requiresKeystoreValue == null) { - requiresKeystoreValue = "false"; - } - final boolean requiresKeystore; - try { - requiresKeystore = Booleans.parseBoolean(requiresKeystoreValue); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("property [requires.keystore] must be [true] or [false]," + - " but was [" + requiresKeystoreValue + "]", e); + if (esVersion.before(Version.V_6_3_0) && esVersion.onOrAfter(Version.V_6_0_0_beta2)) { + propsMap.remove("requires.keystore"); } if (propsMap.isEmpty() == false) { @@ -251,7 +241,7 @@ public static PluginInfo readFromProperties(final Path path) throws IOException } return new PluginInfo(name, description, version, esVersion, javaVersionString, - classname, extendedPlugins, hasNativeController, requiresKeystore); + classname, extendedPlugins, hasNativeController); } /** @@ -326,15 +316,6 @@ public boolean hasNativeController() { return hasNativeController; } - /** - * Whether or not the plugin requires the elasticsearch keystore to exist. 
- * - * @return {@code true} if the plugin requires a keystore, {@code false} otherwise - */ - public boolean requiresKeystore() { - return requiresKeystore; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -347,7 +328,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("classname", classname); builder.field("extended_plugins", extendedPlugins); builder.field("has_native_controller", hasNativeController); - builder.field("requires_keystore", requiresKeystore); } builder.endObject(); @@ -387,7 +367,6 @@ public String toString(String prefix) { .append(prefix).append("Elasticsearch Version: ").append(elasticsearchVersion).append("\n") .append(prefix).append("Java Version: ").append(javaVersion).append("\n") .append(prefix).append("Native Controller: ").append(hasNativeController).append("\n") - .append(prefix).append("Requires Keystore: ").append(requiresKeystore).append("\n") .append(prefix).append("Extended Plugins: ").append(extendedPlugins).append("\n") .append(prefix).append(" * Classname: ").append(classname); return information.toString(); diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 766d171752c16..4514691e4bec4 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -110,7 +110,7 @@ public PluginsService(Settings settings, Path configPath, Path modulesDirectory, for (Class pluginClass : classpathPlugins) { Plugin plugin = loadPlugin(pluginClass, settings, configPath); PluginInfo pluginInfo = new PluginInfo(pluginClass.getName(), "classpath plugin", "NA", Version.CURRENT, "1.8", - pluginClass.getName(), Collections.emptyList(), false, false); + pluginClass.getName(), Collections.emptyList(), false); if 
(logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } diff --git a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 0f8f1ac7017e6..4da927459e55a 100644 --- a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -145,14 +145,14 @@ private static NodeInfo createNodeInfo() { for (int i = 0; i < numPlugins; i++) { plugins.add(new PluginInfo(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), VersionUtils.randomVersion(random()), "1.8", - randomAlphaOfLengthBetween(3, 10), Collections.emptyList(), randomBoolean(), randomBoolean())); + randomAlphaOfLengthBetween(3, 10), Collections.emptyList(), randomBoolean())); } int numModules = randomIntBetween(0, 5); List modules = new ArrayList<>(); for (int i = 0; i < numModules; i++) { modules.add(new PluginInfo(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), VersionUtils.randomVersion(random()), "1.8", - randomAlphaOfLengthBetween(3, 10), Collections.emptyList(), randomBoolean(), randomBoolean())); + randomAlphaOfLengthBetween(3, 10), Collections.emptyList(), randomBoolean())); } pluginsAndModules = new PluginsAndModules(plugins, modules); } diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 88f6c7d83ae46..31854005d6532 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -185,7 +185,7 @@ public void testExtendedPluginsEmpty() throws Exception { public void testSerialize() throws Exception { PluginInfo info = new PluginInfo("c", "foo", "dummy", 
Version.CURRENT, "1.8", "dummyclass", - Collections.singletonList("foo"), randomBoolean(), randomBoolean()); + Collections.singletonList("foo"), randomBoolean()); BytesStreamOutput output = new BytesStreamOutput(); info.writeTo(output); ByteBuffer buffer = ByteBuffer.wrap(output.bytes().toBytesRef().bytes); @@ -198,15 +198,15 @@ public void testSerialize() throws Exception { public void testPluginListSorted() { List plugins = new ArrayList<>(); plugins.add(new PluginInfo("c", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - Collections.emptyList(), randomBoolean(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo("b", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - Collections.emptyList(), randomBoolean(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo( "e", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - Collections.emptyList(), randomBoolean(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo("a", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - Collections.emptyList(), randomBoolean(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo("d", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - Collections.emptyList(), randomBoolean(), randomBoolean())); + Collections.emptyList(), randomBoolean())); PluginsAndModules pluginsInfo = new PluginsAndModules(plugins, Collections.emptyList()); final List infos = pluginsInfo.getPluginInfos(); diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 4f0a73ca44ca6..4d2eb6f2f36f3 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -283,7 +283,7 @@ public OneParameterIncorrectType(Object object) { public 
void testSortBundlesCycleSelfReference() throws Exception { Path pluginDir = createTempDir(); PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("foo"), false, false); + "MyPlugin", Collections.singletonList("foo"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.sortBundles(Collections.singleton(bundle)) @@ -295,16 +295,16 @@ public void testSortBundlesCycle() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order, so we get know the beginning of the cycle PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Arrays.asList("bar", "other"), false, false); + "MyPlugin", Arrays.asList("bar", "other"), false); bundles.add(new PluginsService.Bundle(info, pluginDir)); PluginInfo info2 = new PluginInfo("bar", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("baz"), false, false); + "MyPlugin", Collections.singletonList("baz"), false); bundles.add(new PluginsService.Bundle(info2, pluginDir)); PluginInfo info3 = new PluginInfo("baz", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("foo"), false, false); + "MyPlugin", Collections.singletonList("foo"), false); bundles.add(new PluginsService.Bundle(info3, pluginDir)); PluginInfo info4 = new PluginInfo("other", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); bundles.add(new PluginsService.Bundle(info4, pluginDir)); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.sortBundles(bundles)); @@ -314,7 +314,7 @@ public void testSortBundlesCycle() throws Exception { public void testSortBundlesSingle() throws Exception { Path pluginDir = createTempDir(); 
PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); List sortedBundles = PluginsService.sortBundles(Collections.singleton(bundle)); assertThat(sortedBundles, Matchers.contains(bundle)); @@ -324,15 +324,15 @@ public void testSortBundlesNoDeps() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order PluginInfo info1 = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle1 = new PluginsService.Bundle(info1, pluginDir); bundles.add(bundle1); PluginInfo info2 = new PluginInfo("bar", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle2 = new PluginsService.Bundle(info2, pluginDir); bundles.add(bundle2); PluginInfo info3 = new PluginInfo("baz", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle3 = new PluginsService.Bundle(info3, pluginDir); bundles.add(bundle3); List sortedBundles = PluginsService.sortBundles(bundles); @@ -342,7 +342,7 @@ public void testSortBundlesNoDeps() throws Exception { public void testSortBundlesMissingDep() throws Exception { Path pluginDir = createTempDir(); PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("dne"), false, false); + "MyPlugin", Collections.singletonList("dne"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> 
PluginsService.sortBundles(Collections.singleton(bundle)) @@ -354,19 +354,19 @@ public void testSortBundlesCommonDep() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order PluginInfo info1 = new PluginInfo("grandparent", "desc", "1.0",Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle1 = new PluginsService.Bundle(info1, pluginDir); bundles.add(bundle1); PluginInfo info2 = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("common"), false, false); + "MyPlugin", Collections.singletonList("common"), false); PluginsService.Bundle bundle2 = new PluginsService.Bundle(info2, pluginDir); bundles.add(bundle2); PluginInfo info3 = new PluginInfo("bar", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("common"), false, false); + "MyPlugin", Collections.singletonList("common"), false); PluginsService.Bundle bundle3 = new PluginsService.Bundle(info3, pluginDir); bundles.add(bundle3); PluginInfo info4 = new PluginInfo("common", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("grandparent"), false, false); + "MyPlugin", Collections.singletonList("grandparent"), false); PluginsService.Bundle bundle4 = new PluginsService.Bundle(info4, pluginDir); bundles.add(bundle4); List sortedBundles = PluginsService.sortBundles(bundles); @@ -377,11 +377,11 @@ public void testSortBundlesAlreadyOrdered() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order PluginInfo info1 = new PluginInfo("dep", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle1 = new PluginsService.Bundle(info1, pluginDir); bundles.add(bundle1); PluginInfo info2 = new 
PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("dep"), false, false); + "MyPlugin", Collections.singletonList("dep"), false); PluginsService.Bundle bundle2 = new PluginsService.Bundle(info2, pluginDir); bundles.add(bundle2); List sortedBundles = PluginsService.sortBundles(bundles); @@ -440,7 +440,7 @@ public void testJarHellDuplicateCodebaseWithDep() throws Exception { Map> transitiveDeps = new HashMap<>(); transitiveDeps.put("dep", Collections.singleton(dupJar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("dep"), false, false); + "MyPlugin", Collections.singletonList("dep"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(bundle, transitiveDeps)); @@ -459,7 +459,7 @@ public void testJarHellDuplicateCodebaseAcrossDeps() throws Exception { transitiveDeps.put("dep1", Collections.singleton(dupJar.toUri().toURL())); transitiveDeps.put("dep2", Collections.singleton(dupJar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Arrays.asList("dep1", "dep2"), false, false); + "MyPlugin", Arrays.asList("dep1", "dep2"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(bundle, transitiveDeps)); @@ -476,7 +476,7 @@ public void testJarHellDuplicateClassWithCore() throws Exception { Path pluginJar = pluginDir.resolve("plugin.jar"); makeJar(pluginJar, Level.class); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.emptyList(), false, false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle = new 
PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(bundle, new HashMap<>())); @@ -495,7 +495,7 @@ public void testJarHellDuplicateClassWithDep() throws Exception { Map> transitiveDeps = new HashMap<>(); transitiveDeps.put("dep", Collections.singleton(depJar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Collections.singletonList("dep"), false, false); + "MyPlugin", Collections.singletonList("dep"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(bundle, transitiveDeps)); @@ -518,7 +518,7 @@ public void testJarHellDuplicateClassAcrossDeps() throws Exception { transitiveDeps.put("dep1", Collections.singleton(dep1Jar.toUri().toURL())); transitiveDeps.put("dep2", Collections.singleton(dep2Jar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Arrays.asList("dep1", "dep2"), false, false); + "MyPlugin", Arrays.asList("dep1", "dep2"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(bundle, transitiveDeps)); @@ -541,7 +541,7 @@ public void testJarHellTransitiveMap() throws Exception { transitiveDeps.put("dep1", Collections.singleton(dep1Jar.toUri().toURL())); transitiveDeps.put("dep2", Collections.singleton(dep2Jar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", Arrays.asList("dep1", "dep2"), false, false); + "MyPlugin", Arrays.asList("dep1", "dep2"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); PluginsService.checkBundleJarHell(bundle, 
transitiveDeps); Set deps = transitiveDeps.get("myplugin"); @@ -590,14 +590,14 @@ public void testNonExtensibleDep() throws Exception { public void testIncompatibleElasticsearchVersion() throws Exception { PluginInfo info = new PluginInfo("my_plugin", "desc", "1.0", Version.V_5_0_0, - "1.8", "FakePlugin", Collections.emptyList(), false, false); + "1.8", "FakePlugin", Collections.emptyList(), false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginsService.verifyCompatibility(info)); assertThat(e.getMessage(), containsString("was built for Elasticsearch version 5.0.0")); } public void testIncompatibleJavaVersion() throws Exception { PluginInfo info = new PluginInfo("my_plugin", "desc", "1.0", Version.CURRENT, - "1000000.0", "FakePlugin", Collections.emptyList(), false, false); + "1000000.0", "FakePlugin", Collections.emptyList(), false); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.verifyCompatibility(info)); assertThat(e.getMessage(), containsString("my_plugin requires Java")); } From 9d431fcbede60487dddf2619a1a4bf38473d819c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 12 Mar 2018 13:04:37 -0400 Subject: [PATCH 34/89] Fix BWC versions on plugin info This commit fixes the BWC versions on the plugin info serialization which was changed to remove the requiresKeystore flag. 
--- .../src/main/java/org/elasticsearch/plugins/PluginInfo.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java index fc54352704761..eace29a78014d 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -113,7 +113,7 @@ public PluginInfo(final StreamInput in) throws IOException { } else { hasNativeController = false; } - if (in.getVersion().onOrAfter(Version.V_6_0_0_beta2) && in.getVersion().before(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_0_0_beta2) && in.getVersion().before(Version.V_6_3_0)) { /* * Elasticsearch versions in [6.0.0-beta2, 7.0.0) allowed plugins to specify that they require the keystore and this was * serialized into the plugin info. Therefore, we have to read and ignore this value from the stream. @@ -138,7 +138,7 @@ public void writeTo(final StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_5_4_0)) { out.writeBoolean(hasNativeController); } - if (out.getVersion().onOrAfter(Version.V_6_0_0_beta2) && out.getVersion().before(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_0_0_beta2) && out.getVersion().before(Version.V_6_3_0)) { /* * Elasticsearch versions in [6.0.0-beta2, 7.0.0) allowed plugins to specify that they require the keystore and this was * serialized into the plugin info. Therefore, we have to write out a value for this boolean. From 43ecc49b1393b91b4ebd7ed82b5337add5693786 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 12 Mar 2018 18:02:30 +0100 Subject: [PATCH 35/89] [Test] GoogleCloudStorageFixture command line is too long on Windows (#28991) Windows has some strong limitations on command line arguments, specially when it's too long. 
In the googlecloudstoragefixture anttask the classpath argument is very long and the command fails. This commit removes the classpath as an argument and uses the CLASSPATH environment variable instead. --- plugins/repository-gcs/build.gradle | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 450af05b75ac1..2ed37be68f9b8 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -60,11 +60,10 @@ thirdPartyAudit.excludes = [ /** A task to start the GoogleCloudStorageFixture which emulates a Google Cloud Storage service **/ task googleCloudStorageFixture(type: AntFixture) { - dependsOn compileTestJava - executable = new File(project.runtimeJavaHome, 'bin/java') - args '-cp', "${ -> project.sourceSets.test.runtimeClasspath.asPath }", - 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', - baseDir, 'bucket_test' + dependsOn compileTestJava + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" + executable = new File(project.runtimeJavaHome, 'bin/java') + args 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', baseDir, 'bucket_test' } /** A service account file that points to the Google Cloud Storage service emulated by the fixture **/ From 04a76740efb7d91e0a22a94deefb59e3e404dac1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 12 Mar 2018 13:44:47 -0400 Subject: [PATCH 36/89] Stop sourcing scripts during installation/removal (#28918) Previously we allowed a lot of customization of Elasticsearch during package installation (e.g., the username and group). This customization was achieved by sourcing the env script (e.g., /etc/sysconfig/elasticsearch) during installation. Since we no longer allow such flexibility, we do not need to source these env scripts during package installation and removal. 
--- .../packages/src/common/scripts/postinst | 8 ---- .../packages/src/common/scripts/postrm | 39 +++++-------------- .../packages/src/common/scripts/preinst | 8 ---- .../packages/src/common/scripts/prerm | 3 -- 4 files changed, 10 insertions(+), 48 deletions(-) diff --git a/distribution/packages/src/common/scripts/postinst b/distribution/packages/src/common/scripts/postinst index 45ff73fbe58f1..2edd78785f9d2 100644 --- a/distribution/packages/src/common/scripts/postinst +++ b/distribution/packages/src/common/scripts/postinst @@ -8,14 +8,6 @@ # $1=0 : indicates a removal # $1=1 : indicates an upgrade - - -# Source the default env file -ES_ENV_FILE="${path.env}" -if [ -f "$ES_ENV_FILE" ]; then - . "$ES_ENV_FILE" -fi - IS_UPGRADE=false case "$1" in diff --git a/distribution/packages/src/common/scripts/postrm b/distribution/packages/src/common/scripts/postrm index 48896109cb9ff..a3cb5e1208fe7 100644 --- a/distribution/packages/src/common/scripts/postrm +++ b/distribution/packages/src/common/scripts/postrm @@ -9,9 +9,6 @@ # $1=0 : indicates a removal # $1=1 : indicates an upgrade - - -SOURCE_ENV_FILE=true REMOVE_DIRS=false REMOVE_USER_AND_GROUP=false @@ -24,7 +21,6 @@ case "$1" in purge) REMOVE_USER_AND_GROUP=true - SOURCE_ENV_FILE=false ;; failed-upgrade|abort-install|abort-upgrade|disappear|upgrade|disappear) ;; @@ -45,49 +41,34 @@ case "$1" in ;; esac -# Sets the default values for elasticsearch variables used in this script -LOG_DIR="/var/log/elasticsearch" -PLUGINS_DIR="/usr/share/elasticsearch/plugins" -PID_DIR="/var/run/elasticsearch" -DATA_DIR="/var/lib/elasticsearch" -ES_PATH_CONF="/etc/elasticsearch" - -# Source the default env file -if [ "$SOURCE_ENV_FILE" = "true" ]; then - ES_ENV_FILE="${path.env}" - if [ -f "$ES_ENV_FILE" ]; then - . "$ES_ENV_FILE" - fi -fi - if [ "$REMOVE_DIRS" = "true" ]; then - if [ -d "$LOG_DIR" ]; then + if [ -d /var/log/elasticsearch ]; then echo -n "Deleting log directory..." 
- rm -rf "$LOG_DIR" + rm -rf /var/log/elasticsearch echo " OK" fi - if [ -d "$PLUGINS_DIR" ]; then + if [ -d /usr/share/elasticsearch/plugins ]; then echo -n "Deleting plugins directory..." - rm -rf "$PLUGINS_DIR" + rm -rf /usr/share/elasticsearch/plugins echo " OK" fi - if [ -d "$PID_DIR" ]; then + if [ -d /var/run/elasticsearch ]; then echo -n "Deleting PID directory..." - rm -rf "$PID_DIR" + rm -rf /var/run/elasticsearch echo " OK" fi # Delete the data directory if and only if empty - if [ -d "$DATA_DIR" ]; then - rmdir --ignore-fail-on-non-empty "$DATA_DIR" + if [ -d /var/lib/elasticsearch ]; then + rmdir --ignore-fail-on-non-empty /var/lib/elasticsearch fi # delete the conf directory if and only if empty - if [ -d "$ES_PATH_CONF" ]; then - rmdir --ignore-fail-on-non-empty "$ES_PATH_CONF" + if [ -d /etc/elasticsearch ]; then + rmdir --ignore-fail-on-non-empty /etc/elasticsearch fi fi diff --git a/distribution/packages/src/common/scripts/preinst b/distribution/packages/src/common/scripts/preinst index 73bfe3c2468e9..a9e5295cbc56d 100644 --- a/distribution/packages/src/common/scripts/preinst +++ b/distribution/packages/src/common/scripts/preinst @@ -9,14 +9,6 @@ # $1=1 : indicates an new install # $1=2 : indicates an upgrade - - -# Source the default env file -ES_ENV_FILE="${path.env}" -if [ -f "$ES_ENV_FILE" ]; then - . 
"$ES_ENV_FILE" -fi - case "$1" in # Debian #################################################### diff --git a/distribution/packages/src/common/scripts/prerm b/distribution/packages/src/common/scripts/prerm index e2e9faa9cde21..f13d23cdc9b50 100644 --- a/distribution/packages/src/common/scripts/prerm +++ b/distribution/packages/src/common/scripts/prerm @@ -9,8 +9,6 @@ # $1=0 : indicates a removal # $1=1 : indicates an upgrade - - STOP_REQUIRED=false REMOVE_SERVICE=false @@ -92,5 +90,4 @@ if [ "$REMOVE_SERVICE" = "true" ]; then fi fi - ${scripts.footer} From d61df4a9e87b071bbe9b1f43ed8cdb54714b150f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 12 Mar 2018 14:17:56 -0400 Subject: [PATCH 37/89] Fix comment regarding removal of requiresKeystore The requiresKeystore flag was removed from PluginInfo in 6.3.0. This commit fixes a pair of code comments that incorrectly refer to this version as 7.0.0. --- .../src/main/java/org/elasticsearch/plugins/PluginInfo.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java index eace29a78014d..37f4fcc953564 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -115,7 +115,7 @@ public PluginInfo(final StreamInput in) throws IOException { } if (in.getVersion().onOrAfter(Version.V_6_0_0_beta2) && in.getVersion().before(Version.V_6_3_0)) { /* - * Elasticsearch versions in [6.0.0-beta2, 7.0.0) allowed plugins to specify that they require the keystore and this was + * Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was * serialized into the plugin info. Therefore, we have to read and ignore this value from the stream. 
*/ in.readBoolean(); @@ -140,7 +140,7 @@ public void writeTo(final StreamOutput out) throws IOException { } if (out.getVersion().onOrAfter(Version.V_6_0_0_beta2) && out.getVersion().before(Version.V_6_3_0)) { /* - * Elasticsearch versions in [6.0.0-beta2, 7.0.0) allowed plugins to specify that they require the keystore and this was + * Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was * serialized into the plugin info. Therefore, we have to write out a value for this boolean. */ out.writeBoolean(false); From b844b4da4473d419340b141ec0e80f8fc883c512 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 12 Mar 2018 23:20:07 -0400 Subject: [PATCH 38/89] Add test for dying with dignity (#28987) I have long wanted an actual test that dying with dignity works. It is tricky because if dying with dignity works, it means the test JVM dies which is usually an abnormal condition. And anyway, how does one force a fatal error to be thrown. I was motivated to investigate this again by the fact that I missed a backport to one branch leading to an issue where Elasticsearch would not successfully die with dignity. And now we have a solution: we install a plugin that throws an out of memory error when it receives a request. We hack the standalone test infrastructure to prevent this from failing the test. To do this, we bypass the security manager and remove the PID file for the node; this tricks the test infrastructure into thinking that it does not need to stop the node. We also bypass seccomp so that we can fork jps to make sure that Elasticsearch really died. And to be extra paranoid, we parse the logs of the dead Elasticsearch process to make sure it died with dignity. Never forget. 
--- qa/die-with-dignity/build.gradle | 37 +++++++ .../elasticsearch/DieWithDignityPlugin.java | 51 ++++++++++ .../RestDieWithDignityAction.java | 50 ++++++++++ .../qa/die_with_dignity/DieWithDignityIT.java | 98 +++++++++++++++++++ .../bootstrap/BootstrapForTesting.java | 3 +- .../test/rest/ESRestTestCase.java | 19 +++- 6 files changed, 254 insertions(+), 4 deletions(-) create mode 100644 qa/die-with-dignity/build.gradle create mode 100644 qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java create mode 100644 qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java create mode 100644 qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java diff --git a/qa/die-with-dignity/build.gradle b/qa/die-with-dignity/build.gradle new file mode 100644 index 0000000000000..a3a9bd3da5800 --- /dev/null +++ b/qa/die-with-dignity/build.gradle @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +apply plugin: 'elasticsearch.esplugin' + +esplugin { + description 'Out of memory plugin' + classname 'org.elasticsearch.DieWithDignityPlugin' +} + +integTestRunner { + systemProperty 'tests.security.manager', 'false' + systemProperty 'tests.system_call_filter', 'false' + systemProperty 'pidfile', "${-> integTest.getNodes().get(0).pidFile}" + systemProperty 'log', "${-> integTest.getNodes().get(0).homeDir}/logs/${-> integTest.getNodes().get(0).clusterName}.log" + systemProperty 'runtime.java.home', "${project.runtimeJavaHome}" +} + +test.enabled = false + +check.dependsOn integTest diff --git a/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java b/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java new file mode 100644 index 0000000000000..ed1e3d3879a5d --- /dev/null +++ b/qa/die-with-dignity/src/main/java/org/elasticsearch/DieWithDignityPlugin.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch; + +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; + +import java.util.Collections; +import java.util.List; +import java.util.function.Supplier; + +public class DieWithDignityPlugin extends Plugin implements ActionPlugin { + + @Override + public List getRestHandlers( + final Settings settings, + final RestController restController, + final ClusterSettings clusterSettings, + final IndexScopedSettings indexScopedSettings, + final SettingsFilter settingsFilter, + final IndexNameExpressionResolver indexNameExpressionResolver, + final Supplier nodesInCluster) { + return Collections.singletonList(new RestDieWithDignityAction(settings, restController)); + } + +} diff --git a/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java b/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java new file mode 100644 index 0000000000000..6aa56aa30be97 --- /dev/null +++ b/qa/die-with-dignity/src/main/java/org/elasticsearch/RestDieWithDignityAction.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.http.HttpStats; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +public class RestDieWithDignityAction extends BaseRestHandler { + + RestDieWithDignityAction(final Settings settings, final RestController restController) { + super(settings); + restController.registerHandler(RestRequest.Method.GET, "/_die_with_dignity", this); + } + + @Override + public String getName() { + return "die_with_dignity_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + throw new OutOfMemoryError("die with dignity"); + } + +} diff --git a/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java new file mode 100644 index 0000000000000..8aaf81968561a --- /dev/null +++ b/qa/die-with-dignity/src/test/java/org/elasticsearch/qa/die_with_dignity/DieWithDignityIT.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.qa.die_with_dignity; + +import org.apache.http.ConnectionClosedException; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.CountDownLatch; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; + +public class DieWithDignityIT extends ESRestTestCase { + + public void testDieWithDignity() throws Exception { + // deleting the PID file prevents stopping the cluster from failing since it occurs if and only if the PID file exists + final Path pidFile = PathUtils.get(System.getProperty("pidfile")); + final List pidFileLines = Files.readAllLines(pidFile); + assertThat(pidFileLines, hasSize(1)); + final int pid = Integer.parseInt(pidFileLines.get(0)); + Files.delete(pidFile); + expectThrows(ConnectionClosedException.class, () -> client().performRequest("GET", "/_die_with_dignity")); + + // the Elasticsearch process should die and disappear from the 
output of jps + assertBusy(() -> { + final String jpsPath = PathUtils.get(System.getProperty("runtime.java.home"), "bin/jps").toString(); + final Process process = new ProcessBuilder().command(jpsPath).start(); + assertThat(process.waitFor(), equalTo(0)); + try (InputStream is = process.getInputStream(); + BufferedReader in = new BufferedReader(new InputStreamReader(is, "UTF-8"))) { + String line; + while ((line = in.readLine()) != null) { + final int currentPid = Integer.parseInt(line.split("\\s+")[0]); + assertThat(line, pid, not(equalTo(currentPid))); + } + } + }); + + // parse the logs and ensure that Elasticsearch died with the expected cause + final List lines = Files.readAllLines(PathUtils.get(System.getProperty("log"))); + + final Iterator it = lines.iterator(); + + boolean fatalErrorOnTheNetworkLayer = false; + boolean fatalErrorInThreadExiting = false; + + while (it.hasNext() && (fatalErrorOnTheNetworkLayer == false || fatalErrorInThreadExiting == false)) { + final String line = it.next(); + if (line.contains("fatal error on the network layer")) { + fatalErrorOnTheNetworkLayer = true; + } else if (line.matches(".*\\[ERROR\\]\\[o.e.b.ElasticsearchUncaughtExceptionHandler\\] \\[node-0\\]" + + " fatal error in thread \\[Thread-\\d+\\], exiting$")) { + fatalErrorInThreadExiting = true; + assertTrue(it.hasNext()); + assertThat(it.next(), equalTo("java.lang.OutOfMemoryError: die with dignity")); + } + } + + assertTrue(fatalErrorOnTheNetworkLayer); + assertTrue(fatalErrorInThreadExiting); + } + + @Override + protected boolean preserveClusterUponCompletion() { + // as the cluster is dead its state can not be wiped successfully so we have to bypass wiping the cluster + return true; + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 9c891b3f243ef..2692a9521a9df 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -78,7 +78,8 @@ public class BootstrapForTesting { } // just like bootstrap, initialize natives, then SM - Bootstrap.initializeNatives(javaTmpDir, true, true, true); + final boolean systemCallFilter = Booleans.parseBoolean(System.getProperty("tests.system_call_filter", "true")); + Bootstrap.initializeNatives(javaTmpDir, true, systemCallFilter, true); // initialize probes Bootstrap.initializeProbes(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 0290a1009144d..0d07f18bac0cd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -145,9 +145,11 @@ public void initClient() throws IOException { */ @After public final void cleanUpCluster() throws Exception { - wipeCluster(); - waitForClusterStateUpdatesToFinish(); - logIfThereAreRunningTasks(); + if (preserveClusterUponCompletion() == false) { + wipeCluster(); + waitForClusterStateUpdatesToFinish(); + logIfThereAreRunningTasks(); + } } @AfterClass @@ -175,6 +177,17 @@ protected static RestClient adminClient() { return adminClient; } + /** + * Returns whether to preserve the state of the cluster upon completion of this test. Defaults to false. If true, overrides the value of + * {@link #preserveIndicesUponCompletion()}, {@link #preserveTemplatesUponCompletion()}, {@link #preserveReposUponCompletion()}, and + * {@link #preserveSnapshotsUponCompletion()}. + * + * @return true if the state of the cluster should be preserved + */ + protected boolean preserveClusterUponCompletion() { + return false; + } + /** * Returns whether to preserve the indices created during this test on completion of this test. 
* Defaults to {@code false}. Override this method if indices should be preserved after the test, From 40eaa95320e2ccf88f58e252e357d168c173b9be Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 13 Mar 2018 09:10:40 +0100 Subject: [PATCH 39/89] Skip GeoIpProcessorFactoryTests on Windows (#29005) With this commit we skip all GeoIpProcessorFactoryTests on Windows. These tests use a MappedByteBuffer which will keep its file mappings until it is garbage-collected. As a consequence, the corresponding file appears to be still in use, Windows cannot delete it and the test will fail in teardown. Closes #29001 --- .../geoip/GeoIpProcessorFactoryTests.java | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 0cc9e8a484747..0aa2eb9fdfa3b 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -22,6 +22,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.maxmind.db.NoCache; import com.maxmind.db.NodeCache; +import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Randomness; import org.elasticsearch.test.ESTestCase; @@ -51,6 +52,13 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { @BeforeClass public static void loadDatabaseReaders() throws IOException { + // Skip setup because Windows cannot cleanup these files properly. The reason is that they are using + // a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. As a consequence, + // the corresponding file appears to be still in use and Windows cannot delete it. 
+ if (Constants.WINDOWS) { + return; + } + Path configDir = createTempDir(); Path geoIpConfigDir = configDir.resolve("ingest-geoip"); Files.createDirectories(geoIpConfigDir); @@ -67,6 +75,13 @@ public static void loadDatabaseReaders() throws IOException { @AfterClass public static void closeDatabaseReaders() throws IOException { + // Skip setup because Windows cannot cleanup these files properly. The reason is that they are using + // a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. As a consequence, + // the corresponding file appears to be still in use and Windows cannot delete it. + if (Constants.WINDOWS) { + return; + } + for (DatabaseReaderLazyLoader reader : databaseReaders.values()) { reader.close(); } @@ -74,6 +89,9 @@ public static void closeDatabaseReaders() throws IOException { } public void testBuildDefaults() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); @@ -90,6 +108,9 @@ public void testBuildDefaults() throws Exception { } public void testSetIgnoreMissing() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. 
+ assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); @@ -107,6 +128,9 @@ public void testSetIgnoreMissing() throws Exception { } public void testCountryBuildDefaults() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); @@ -125,6 +149,9 @@ public void testCountryBuildDefaults() throws Exception { } public void testAsnBuildDefaults() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); @@ -143,6 +170,9 @@ public void testAsnBuildDefaults() throws Exception { } public void testBuildTargetField() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. 
+ assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("field", "_field"); @@ -154,6 +184,9 @@ public void testBuildTargetField() throws Exception { } public void testBuildDbFile() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("field", "_field"); @@ -167,6 +200,9 @@ public void testBuildDbFile() throws Exception { } public void testBuildWithCountryDbAndAsnFields() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("field", "_field"); @@ -181,6 +217,9 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { } public void testBuildWithAsnDbAndCityFields() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. 
+ assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); config.put("field", "_field"); @@ -195,6 +234,9 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { } public void testBuildNonExistingDbFile() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config = new HashMap<>(); @@ -205,6 +247,9 @@ public void testBuildNonExistingDbFile() throws Exception { } public void testBuildFields() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Set properties = EnumSet.noneOf(GeoIpProcessor.Property.class); @@ -229,6 +274,9 @@ public void testBuildFields() throws Exception { } public void testBuildIllegalFieldOption() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. 
+ assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); Map config1 = new HashMap<>(); @@ -246,6 +294,9 @@ public void testBuildIllegalFieldOption() throws Exception { } public void testLazyLoading() throws Exception { + // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. + // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. + assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); Path configDir = createTempDir(); Path geoIpConfigDir = configDir.resolve("ingest-geoip"); Files.createDirectories(geoIpConfigDir); From 2990fc60e3bfa9ecec624ceb9a2a3e76ccece08c Mon Sep 17 00:00:00 2001 From: Joost Rothweiler Date: Tue, 13 Mar 2018 09:58:47 +0100 Subject: [PATCH 40/89] Indices PUT Mapping API docs: Remove mapping type user and rephrase first sentence (#28998) The current docs on [Indices APIs: PUT Mapping](https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-mapping.html) suggest that having a number of different mapping types per index is still possible in Elasticsearch versions > 6.0.0 although they have been [removed](https://www.elastic.co/guide/en/elasticsearch/reference/current/removal-of-types.html). The console code has already been updated accordingly but notes (2) and (3) on the console code still name the `user` mapping type. This PR updates the list with notes after the console code, as well as the first sentence of the docs to avoid confusion. Also, I have removed the second command from the console code as it no longer holds any value if the docs are solely on the `_doc` mapping. 
--- docs/reference/indices/put-mapping.asciidoc | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/docs/reference/indices/put-mapping.asciidoc b/docs/reference/indices/put-mapping.asciidoc index 44a689e98edb3..84838d67e1d03 100644 --- a/docs/reference/indices/put-mapping.asciidoc +++ b/docs/reference/indices/put-mapping.asciidoc @@ -1,8 +1,7 @@ [[indices-put-mapping]] == Put Mapping -The PUT mapping API allows you to add a new type to an existing index, or add new -fields to an existing type: +The PUT mapping API allows you to add fields to an existing index or to change search only settings of existing fields. [source,js] -------------------------------------------------- @@ -10,15 +9,6 @@ PUT twitter <1> {} PUT twitter/_mapping/_doc <2> -{ - "properties": { - "name": { - "type": "text" - } - } -} - -PUT twitter/_mapping/_doc <3> { "properties": { "email": { @@ -29,8 +19,7 @@ PUT twitter/_mapping/_doc <3> -------------------------------------------------- // CONSOLE <1> <> called `twitter` without any type mapping. -<2> Uses the PUT mapping API to add a new mapping type called `user`. -<3> Uses the PUT mapping API to add a new field called `email` to the `user` mapping type. +<2> Uses the PUT mapping API to add a new field called `email` to the `_doc` mapping type. More information on how to define type mappings can be found in the <> section. @@ -125,4 +114,3 @@ PUT my_index/_mapping/_doc Each <> specifies whether or not its setting can be updated on an existing field. 
- From 25f4ebeb26c6e050d9a50e2cefbd7314ca191aa9 Mon Sep 17 00:00:00 2001 From: olcbean <26058559+olcbean@users.noreply.github.com> Date: Tue, 13 Mar 2018 12:12:52 +0100 Subject: [PATCH 41/89] REST api specs : remove unsupported `wait_for_merge` param (#28959) --- .../main/resources/rest-api-spec/api/indices.forcemerge.json | 4 ---- 1 file changed, 4 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json index d8edd550aa6b8..d87ce2a4451d6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json @@ -37,10 +37,6 @@ "only_expunge_deletes": { "type" : "boolean", "description" : "Specify whether the operation should only expunge deleted documents" - }, - "wait_for_merge": { - "type" : "boolean", - "description" : "Specify whether the request should block until the merge process is finished (default: true)" } } }, From 540f7058e824dd59ba3ab9c9fc4d8e38ea12b80d Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Tue, 13 Mar 2018 15:02:25 +0100 Subject: [PATCH 42/89] Add migration docs for Geoip Processor (#29006) This commit adds a note to the 6.3 docs that Geoip database files are now stored uncompressed. Relates #28963 --- docs/reference/migration/migrate_6_3.asciidoc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/reference/migration/migrate_6_3.asciidoc b/docs/reference/migration/migrate_6_3.asciidoc index 653c99d2a338f..50cbf5769e596 100644 --- a/docs/reference/migration/migrate_6_3.asciidoc +++ b/docs/reference/migration/migrate_6_3.asciidoc @@ -10,3 +10,11 @@ must now be specified in the client settings instead. See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings]. 
+ +==== Ingest Geoip Plugin + +* In earlier versions, database files have been stored as gzip compressed files with the extension `.gz` to +save disk space. As a consequence, database files had to be loaded in memory. Now the default database files +are stored uncompressed as `.mmdb` files, which allows memory-mapping them and saves heap memory. Any +custom database files must also be stored uncompressed. Consequently, the `database_file` property in any +ingest pipelines that use the Geoip Processor must refer to the uncompressed database files as well. \ No newline at end of file From bf5cb76905a01bf02f5eae9ebe8e3a3979ca42e4 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 10:07:02 -0400 Subject: [PATCH 43/89] Fix packaging scripts references to /etc/elasticsearch We no longer source the environment file in the packaging scripts yet we had leftover references to variables defined by those environment files. This commit cleans these up. --- .../packages/src/common/scripts/postinst | 16 ++++++---------- distribution/packages/src/common/scripts/prerm | 11 +++-------- 2 files changed, 9 insertions(+), 18 deletions(-) diff --git a/distribution/packages/src/common/scripts/postinst b/distribution/packages/src/common/scripts/postinst index 2edd78785f9d2..abc7c91b81d78 100644 --- a/distribution/packages/src/common/scripts/postinst +++ b/distribution/packages/src/common/scripts/postinst @@ -95,19 +95,15 @@ chown -R root:elasticsearch /etc/elasticsearch chmod g+s /etc/elasticsearch chmod 0750 /etc/elasticsearch -if [ -f /etc/default/elasticsearch ]; then - chown root:elasticsearch /etc/default/elasticsearch +if [ -f ${path.env} ]; then + chown root:elasticsearch ${path.env} fi -if [ -f /etc/sysconfig/elasticsearch ]; then - chown root:elasticsearch /etc/sysconfig/elasticsearch -fi - -if [ ! 
-f /etc/elasticsearch/elasticsearch.keystore ]; then /usr/share/elasticsearch/bin/elasticsearch-keystore create - chown root:elasticsearch "$ES_PATH_CONF"/elasticsearch.keystore - chmod 660 "$ES_PATH_CONF"/elasticsearch.keystore - md5sum "$ES_PATH_CONF"/elasticsearch.keystore > "$ES_PATH_CONF"/.elasticsearch.keystore.initial_md5sum + chown root:elasticsearch /etc/elasticsearch/elasticsearch.keystore + chmod 660 /etc/elasticsearch/elasticsearch.keystore + md5sum /etc/elasticsearch/elasticsearch.keystore > /etc/elasticsearch/.elasticsearch.keystore.initial_md5sum fi ${scripts.footer} diff --git a/distribution/packages/src/common/scripts/prerm b/distribution/packages/src/common/scripts/prerm index f13d23cdc9b50..f5cf67ca0b662 100644 --- a/distribution/packages/src/common/scripts/prerm +++ b/distribution/packages/src/common/scripts/prerm @@ -42,11 +42,6 @@ case "$1" in ;; esac -ES_ENV_FILE="${path.env}" -if [ -f "$ES_ENV_FILE" ]; then - . "$ES_ENV_FILE" -fi - # Stops the service if [ "$STOP_REQUIRED" = "true" ]; then echo -n "Stopping elasticsearch service..." 
@@ -70,9 +65,9 @@ if [ "$STOP_REQUIRED" = "true" ]; then echo " OK" fi -if [ -f "$ES_PATH_CONF"/elasticsearch.keystore ]; then - if md5sum --status -c "$ES_PATH_CONF"/.elasticsearch.keystore.initial_md5sum; then - rm "$ES_PATH_CONF"/elasticsearch.keystore "$ES_PATH_CONF"/.elasticsearch.keystore.initial_md5sum +if [ -f /etc/elasticsearch/elasticsearch.keystore ]; then + if md5sum --status -c /etc/elasticsearch/.elasticsearch.keystore.initial_md5sum; then + rm /etc/elasticsearch/elasticsearch.keystore /etc/elasticsearch/.elasticsearch.keystore.initial_md5sum fi fi From c1c5a0f7f117ffe9293de0d4b65c84fc8c0cbc74 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 12:49:33 -0400 Subject: [PATCH 44/89] Copy Lucene IOUtils (#29012) As we have factored Elasticsearch into smaller libraries, we have ended up in a situation in which some of the dependencies of Elasticsearch are not available to code that depends on these smaller libraries but not server Elasticsearch. This is a good thing; it was one of the goals of separating Elasticsearch into smaller libraries, to shed some of the dependencies from other components of the system. However, this now means that simple utility methods from Lucene that we rely on are no longer available everywhere. This commit copies IOUtils (with some small formatting changes for our codebase) into the fold so that other components of the system can rely on these methods where they no longer depend on Lucene. 
--- .../resources/forbidden/es-all-signatures.txt | 2 + .../plugins/InstallPluginCommand.java | 2 +- .../org/elasticsearch/plugins/PluginCli.java | 2 +- .../plugins/RemovePluginCommand.java | 2 +- .../core/internal/io/IOUtils.java | 291 ++++++++++++++++++ .../core/internal/io/IOUtilsTests.java | 229 ++++++++++++++ .../org/elasticsearch/painless/Debugger.java | 6 +- .../painless/PainlessDocGenerator.java | 2 +- .../mapper/ScaledFloatFieldTypeTests.java | 2 +- .../discovery/ec2/Ec2DiscoveryPlugin.java | 2 +- .../discovery/ec2/Ec2NameResolver.java | 2 +- .../discovery/gce/GceDiscoveryPlugin.java | 2 +- .../geoip/DatabaseReaderLazyLoader.java | 2 +- .../ingest/geoip/IngestGeoIpPlugin.java | 2 +- .../repositories/azure/AzureRepositoryF.java | 2 +- .../analyzing/XAnalyzingSuggester.java | 8 +- .../analyze/TransportAnalyzeAction.java | 2 +- .../elasticsearch/bootstrap/Bootstrap.java | 2 +- .../org/elasticsearch/bootstrap/Spawner.java | 2 +- .../bootstrap/SystemCallFilter.java | 2 +- .../client/transport/TransportClient.java | 2 +- .../TransportClientNodesService.java | 2 +- .../metadata/MetaDataMappingService.java | 2 +- .../common/blobstore/fs/FsBlobContainer.java | 2 +- .../common/blobstore/fs/FsBlobStore.java | 2 +- .../common/io/FileSystemUtils.java | 2 +- .../org/elasticsearch/common/io/Streams.java | 2 +- .../common/lease/Releasables.java | 2 +- .../common/settings/Settings.java | 2 +- .../common/util/IndexFolderUpgrader.java | 2 +- .../xcontent/json/JsonXContentParser.java | 2 +- .../zen/PublishClusterStateAction.java | 2 +- .../discovery/zen/UnicastZenPing.java | 2 +- .../discovery/zen/ZenDiscovery.java | 2 +- .../elasticsearch/env/NodeEnvironment.java | 2 +- .../gateway/MetaDataStateFormat.java | 2 +- .../org/elasticsearch/index/IndexService.java | 2 +- .../index/analysis/AnalysisRegistry.java | 2 +- .../index/analysis/IndexAnalyzers.java | 2 +- .../elasticsearch/index/cache/IndexCache.java | 2 +- .../index/engine/InternalEngine.java | 2 +- 
.../index/search/QueryStringQueryParser.java | 4 +- .../elasticsearch/index/shard/IndexShard.java | 2 +- .../shard/IndexShardOperationPermits.java | 2 +- .../elasticsearch/index/shard/ShardPath.java | 2 +- .../blobstore/SlicedInputStream.java | 2 +- .../org/elasticsearch/index/store/Store.java | 2 +- .../index/translog/Translog.java | 2 +- .../index/translog/TranslogWriter.java | 2 +- .../translog/TruncateTranslogCommand.java | 2 +- .../elasticsearch/indices/IndicesService.java | 2 +- .../indices/analysis/HunspellService.java | 2 +- .../recovery/RecoverySourceHandler.java | 2 +- .../java/org/elasticsearch/node/Node.java | 2 +- .../org/elasticsearch/node/NodeService.java | 2 +- .../elasticsearch/plugins/PluginSecurity.java | 2 +- .../elasticsearch/script/ScriptService.java | 2 +- .../elasticsearch/search/SearchService.java | 16 +- .../phrase/DirectCandidateGenerator.java | 2 +- .../tasks/TaskResultsService.java | 5 +- .../elasticsearch/threadpool/ThreadPool.java | 2 +- .../CompressibleBytesOutputStream.java | 2 +- .../transport/RemoteClusterConnection.java | 2 +- .../transport/RemoteClusterService.java | 2 +- .../elasticsearch/transport/TcpTransport.java | 2 +- .../transport/TransportService.java | 2 +- .../BroadcastReplicationTests.java | 2 +- .../bootstrap/BootstrapTests.java | 2 +- .../cluster/allocation/ClusterRerouteIT.java | 2 +- .../routing/OperationRoutingTests.java | 2 +- .../lucene/index/ESDirectoryReaderTests.java | 10 +- .../lucene/index/FreqTermsEnumTests.java | 2 +- .../settings/KeyStoreCommandTestCase.java | 2 +- .../common/settings/KeyStoreWrapperTests.java | 2 +- .../discovery/DiscoveryModuleTests.java | 2 +- .../single/SingleNodeDiscoveryIT.java | 2 +- .../single/SingleNodeDiscoveryTests.java | 2 +- .../discovery/zen/UnicastZenPingTests.java | 2 +- .../discovery/zen/ZenDiscoveryUnitTests.java | 2 +- .../env/NodeEnvironmentTests.java | 2 +- .../elasticsearch/index/IndexModuleTests.java | 2 +- .../cache/bitset/BitSetFilterCacheTests.java | 2 +- 
.../index/engine/InternalEngineTests.java | 2 +- .../plain/HalfFloatFielddataTests.java | 2 +- .../index/mapper/DateFieldTypeTests.java | 2 +- .../index/mapper/NumberFieldTypeTests.java | 2 +- .../index/mapper/TypeFieldTypeTests.java | 2 +- .../RecoveryDuringReplicationTests.java | 2 +- .../GlobalCheckpointSyncActionTests.java | 2 +- .../shard/IndexSearcherWrapperTests.java | 2 +- .../index/shard/IndexShardIT.java | 2 +- .../index/shard/IndexShardTests.java | 2 +- .../index/shard/RefreshListenersTests.java | 2 +- .../index/shard/ShardUtilsTests.java | 2 +- .../index/shard/StoreRecoveryTests.java | 2 +- .../elasticsearch/index/store/StoreTests.java | 2 +- .../translog/TranslogDeletionPolicyTests.java | 2 +- .../index/translog/TranslogTests.java | 2 +- .../indices/IndicesQueryCacheTests.java | 2 +- .../indices/IndicesRequestCacheTests.java | 2 +- .../recovery/RecoverySourceHandlerTests.java | 2 +- .../BlobStoreRepositoryRestoreTests.java | 2 +- .../search/SearchCancellationTests.java | 2 +- .../bucket/GlobalAggregatorTests.java | 2 +- .../profile/query/QueryProfilerTests.java | 2 +- .../SharedClusterSnapshotRestoreIT.java | 2 +- .../RemoteClusterConnectionTests.java | 2 +- .../transport/RemoteClusterServiceTests.java | 2 +- .../transport/TransportActionProxyTests.java | 2 +- .../watcher/FileWatcherTests.java | 4 +- .../index/engine/EngineTestCase.java | 2 +- .../index/shard/IndexShardTestCase.java | 2 +- .../test/AbstractQueryTestCase.java | 2 +- .../elasticsearch/test/ESIntegTestCase.java | 2 +- .../test/ESSingleNodeTestCase.java | 2 +- .../test/InternalTestCluster.java | 2 +- .../test/rest/ESRestTestCase.java | 2 +- .../AbstractSimpleTransportTestCase.java | 2 +- .../transport/MockTcpTransport.java | 2 +- .../test/test/InternalTestClusterTests.java | 2 +- .../nio/channel/ChannelFactoryTests.java | 2 +- 121 files changed, 660 insertions(+), 137 deletions(-) create mode 100644 libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java 
create mode 100644 libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt index f1d271d602ce1..130984eb58f17 100644 --- a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt @@ -48,3 +48,5 @@ java.nio.channels.SocketChannel#connect(java.net.SocketAddress) # org.elasticsearch.common.Booleans#parseBoolean(java.lang.String) directly on the string. @defaultMessage use org.elasticsearch.common.Booleans#parseBoolean(java.lang.String) java.lang.Boolean#getBoolean(java.lang.String) + +org.apache.lucene.util.IOUtils @ use @org.elasticsearch.core.internal.io instead diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 4648f18ffb812..b7f201b70aa46 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -24,7 +24,7 @@ import org.apache.lucene.search.spell.LevensteinDistance; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.cli.EnvironmentAwareCommand; diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/PluginCli.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/PluginCli.java index aac22302d3be4..9d8ccd4438d8a 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/PluginCli.java +++ 
b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/PluginCli.java @@ -19,7 +19,7 @@ package org.elasticsearch.plugins; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.LoggingAwareMultiCommand; import org.elasticsearch.cli.MultiCommand; diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index 4cd83e329b158..16f5fdb47409a 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -21,7 +21,7 @@ import joptsimple.OptionSet; import joptsimple.OptionSpec; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.cli.EnvironmentAwareCommand; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.Terminal; diff --git a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java new file mode 100644 index 0000000000000..7507327199baf --- /dev/null +++ b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java @@ -0,0 +1,291 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.elasticsearch.core.internal.io; + +import java.io.Closeable; +import java.io.IOException; +import java.nio.channels.FileChannel; +import java.nio.file.FileVisitResult; +import java.nio.file.FileVisitor; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Utilities for common I/O methods. Borrowed heavily from Lucene (org.apache.lucene.util.IOUtils). + */ +public final class IOUtils { + + private IOUtils() { + + } + + /** + * Closes all given Closeables. Some of the Closeables may be null; they are ignored. After everything is closed, the + * method either throws the first exception it hit while closing, or completes normally if there were no exceptions. + * + * @param objects objects to close + */ + public static void close(final Closeable... objects) throws IOException { + close(Arrays.asList(objects)); + } + + /** + * Closes all given {@link Closeable}s. + * + * @param objects objects to close + * + * @see #close(Closeable...) 
+ */ + public static void close(final Iterable objects) throws IOException { + Throwable th = null; + + for (final Closeable object : objects) { + try { + if (object != null) { + object.close(); + } + } catch (final Throwable t) { + addSuppressed(th, t); + if (th == null) { + th = t; + } + } + } + + if (th != null) { + throw rethrowAlways(th); + } + } + + /** + * Closes all given {@link Closeable}s, suppressing all thrown exceptions. Some of the {@link Closeable}s may be null, they are ignored. + * + * @param objects objects to close + */ + public static void closeWhileHandlingException(final Closeable... objects) { + closeWhileHandlingException(Arrays.asList(objects)); + } + + /** + * Closes all given {@link Closeable}s, suppressing all thrown exceptions. + * + * @param objects objects to close + * + * @see #closeWhileHandlingException(Closeable...) + */ + public static void closeWhileHandlingException(final Iterable objects) { + for (final Closeable object : objects) { + // noinspection EmptyCatchBlock + try { + if (object != null) { + object.close(); + } + } catch (final Throwable t) { + + } + } + } + + /** + * Adds a {@link Throwable} to the list of suppressed {@link Exception}s of the first {@link Throwable}. + * + * @param exception the exception to add a suppression to, if non-null + * @param suppressed the exception to suppress + */ + private static void addSuppressed(final Throwable exception, final Throwable suppressed) { + if (exception != null && suppressed != null) { + exception.addSuppressed(suppressed); + } + } + + /** + * This utility method takes a previously caught (non-null) {@link Throwable} and rethrows either the original argument if it was a + * subclass of the {@link IOException} or an {@link RuntimeException} with the cause set to the argument. + *

+ * This method never returns any value, even though it declares a return value of type {@link Error}. The return + * value declaration is very useful to let the compiler know that the code path following the invocation of this method is unreachable. + * So in most cases the invocation of this method will be guarded by an {@code if} and used together with a {@code throw} statement, as + * in: + *

+ *

{@code
+     *   if (t != null) throw IOUtils.rethrowAlways(t)
+     * }
+     * 
+ * + * @param th the throwable to rethrow; must not be null + * @return this method always results in an exception, it never returns any value; see method documentation for details and usage + * example + * @throws IOException if the argument was an instance of {@link IOException} + * @throws RuntimeException with the {@link RuntimeException#getCause()} set to the argument, if it was not an instance of + * {@link IOException} + */ + private static Error rethrowAlways(final Throwable th) throws IOException, RuntimeException { + if (th == null) { + throw new AssertionError("rethrow argument must not be null."); + } + + if (th instanceof IOException) { + throw (IOException) th; + } + + if (th instanceof RuntimeException) { + throw (RuntimeException) th; + } + + if (th instanceof Error) { + throw (Error) th; + } + + throw new RuntimeException(th); + } + + /** + * Deletes all given files, suppressing all thrown {@link IOException}s. Some of the files may be null, if so they are ignored. + * + * @param files the paths of files to delete + */ + public static void deleteFilesIgnoringExceptions(final Path... files) { + deleteFilesIgnoringExceptions(Arrays.asList(files)); + } + + /** + * Deletes all given files, suppressing all thrown {@link IOException}s. Some of the files may be null, if so they are ignored. + * + * @param files the paths of files to delete + */ + public static void deleteFilesIgnoringExceptions(final Collection files) { + for (final Path name : files) { + if (name != null) { + // noinspection EmptyCatchBlock + try { + Files.delete(name); + } catch (final Throwable ignored) { + + } + } + } + } + + /** + * Deletes one or more files or directories (and everything underneath it). + * + * @throws IOException if any of the given files (or their sub-hierarchy files in case of directories) cannot be removed. + */ + public static void rm(final Path... 
locations) throws IOException { + final LinkedHashMap unremoved = rm(new LinkedHashMap<>(), locations); + if (!unremoved.isEmpty()) { + final StringBuilder b = new StringBuilder("could not remove the following files (in the order of attempts):\n"); + for (final Map.Entry kv : unremoved.entrySet()) { + b.append(" ") + .append(kv.getKey().toAbsolutePath()) + .append(": ") + .append(kv.getValue()) + .append("\n"); + } + throw new IOException(b.toString()); + } + } + + private static LinkedHashMap rm(final LinkedHashMap unremoved, final Path... locations) { + if (locations != null) { + for (final Path location : locations) { + // TODO: remove this leniency + if (location != null && Files.exists(location)) { + try { + Files.walkFileTree(location, new FileVisitor() { + @Override + public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attrs) throws IOException { + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult postVisitDirectory(final Path dir, final IOException impossible) throws IOException { + assert impossible == null; + + try { + Files.delete(dir); + } catch (final IOException e) { + unremoved.put(dir, e); + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException { + try { + Files.delete(file); + } catch (final IOException exc) { + unremoved.put(file, exc); + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFileFailed(final Path file, final IOException exc) throws IOException { + if (exc != null) { + unremoved.put(file, exc); + } + return FileVisitResult.CONTINUE; + } + }); + } catch (final IOException impossible) { + throw new AssertionError("visitor threw exception", impossible); + } + } + } + } + return unremoved; + } + + // TODO: replace with constants class if needed (cf. 
org.apache.lucene.util.Constants) + private static final boolean LINUX = System.getProperty("os.name").startsWith("Linux"); + private static final boolean MAC_OS_X = System.getProperty("os.name").startsWith("Mac OS X"); + + /** + * Ensure that any writes to the given file is written to the storage device that contains it. The {@code isDir} parameter specifies + * whether or not the path to sync is a directory. This is needed because we open for read and ignore an {@link IOException} since not + * all filesystems and operating systems support fsyncing on a directory. For regular files we must open for write for the fsync to have + * an effect. + * + * @param fileToSync the file to fsync + * @param isDir if true, the given file is a directory (we open for read and ignore {@link IOException}s, because not all file + * systems and operating systems allow to fsync on a directory) + */ + public static void fsync(final Path fileToSync, final boolean isDir) throws IOException { + try (FileChannel file = FileChannel.open(fileToSync, isDir ? StandardOpenOption.READ : StandardOpenOption.WRITE)) { + file.force(true); + } catch (final IOException ioe) { + if (isDir) { + assert (LINUX || MAC_OS_X) == false : + "on Linux and MacOSX fsyncing a directory should not throw IOException, "+ + "we just don't want to rely on that in production (undocumented); got: " + ioe; + // ignore exception if it is a directory + return; + } + // throw original exception + throw ioe; + } + } + +} diff --git a/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java new file mode 100644 index 0000000000000..c133a9ddc1f3c --- /dev/null +++ b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.elasticsearch.core.internal.io; + +import org.apache.lucene.mockfile.FilterFileSystemProvider; +import org.apache.lucene.mockfile.FilterPath; +import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.ESTestCase; + +import java.io.Closeable; +import java.io.IOException; +import java.io.OutputStream; +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.nio.file.AccessDeniedException; +import java.nio.file.FileSystem; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasToString; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class IOUtilsTests extends ESTestCase { + + public void testCloseArray() throws IOException { + runTestClose(Function.identity(), IOUtils::close); + } + + public void testCloseIterable() throws 
IOException { + runTestClose(Arrays::asList, IOUtils::close); + } + + private void runTestClose(final Function function, final CheckedConsumer close) throws IOException { + final int numberOfCloseables = randomIntBetween(0, 7); + final Closeable[] closeables = new Closeable[numberOfCloseables]; + for (int i = 0; i < numberOfCloseables; i++) { + closeables[i] = mock(Closeable.class); + } + close.accept(function.apply(closeables)); + for (int i = 0; i < numberOfCloseables; i++) { + verify(closeables[i]).close(); + verifyNoMoreInteractions(closeables[i]); + } + } + + public void testCloseArrayWithIOExceptions() throws IOException { + runTestCloseWithIOExceptions(Function.identity(), IOUtils::close); + } + + public void testCloseIterableWithIOExceptions() throws IOException { + runTestCloseWithIOExceptions(Arrays::asList, IOUtils::close); + } + + private void runTestCloseWithIOExceptions( + final Function function, final CheckedConsumer close) throws IOException { + final int numberOfCloseables = randomIntBetween(1, 8); + final Closeable[] closeables = new Closeable[numberOfCloseables]; + final List indexesThatThrow = new ArrayList<>(numberOfCloseables); + for (int i = 0; i < numberOfCloseables - 1; i++) { + final Closeable closeable = mock(Closeable.class); + if (randomBoolean()) { + indexesThatThrow.add(i); + doThrow(new IOException(Integer.toString(i))).when(closeable).close(); + } + closeables[i] = closeable; + } + + // ensure that at least one always throws + final Closeable closeable = mock(Closeable.class); + if (indexesThatThrow.isEmpty() || randomBoolean()) { + indexesThatThrow.add(numberOfCloseables - 1); + doThrow(new IOException(Integer.toString(numberOfCloseables - 1))).when(closeable).close(); + } + closeables[numberOfCloseables - 1] = closeable; + + final IOException e = expectThrows(IOException.class, () -> close.accept(function.apply(closeables))); + assertThat(e.getMessage(), equalTo(Integer.toString(indexesThatThrow.get(0)))); + 
assertThat(e.getSuppressed(), arrayWithSize(indexesThatThrow.size() - 1)); + for (int i = 1; i < indexesThatThrow.size(); i++) { + assertNotNull(e.getSuppressed()[i - 1]); + assertThat(e.getSuppressed()[i - 1].getMessage(), equalTo(Integer.toString(indexesThatThrow.get(i)))); + } + } + + public void testDeleteFilesIgnoringExceptionsArray() throws IOException { + runDeleteFilesIgnoringExceptionsTest(Function.identity(), IOUtils::deleteFilesIgnoringExceptions); + } + + public void testDeleteFilesIgnoringExceptionsIterable() throws IOException { + runDeleteFilesIgnoringExceptionsTest(Arrays::asList, IOUtils::deleteFilesIgnoringExceptions); + } + + private void runDeleteFilesIgnoringExceptionsTest( + final Function function, CheckedConsumer deleteFilesIgnoringExceptions) throws IOException { + final int numberOfFiles = randomIntBetween(0, 7); + final Path[] files = new Path[numberOfFiles]; + for (int i = 0; i < numberOfFiles; i++) { + if (randomBoolean()) { + files[i] = createTempFile(); + } else { + final Path temporary = createTempFile(); + files[i] = PathUtils.get(temporary.toString(), randomAlphaOfLength(8)); + Files.delete(temporary); + } + } + deleteFilesIgnoringExceptions.accept(function.apply(files)); + for (int i = 0; i < numberOfFiles; i++) { + assertFalse(files[i].toString(), Files.exists(files[i])); + } + } + + public void testRm() throws IOException { + runTestRm(false); + } + + public void testRmWithIOExceptions() throws IOException { + runTestRm(true); + } + + public void runTestRm(final boolean exception) throws IOException { + final int numberOfLocations = randomIntBetween(0, 7); + final Path[] locations = new Path[numberOfLocations]; + final List locationsThrowingException = new ArrayList<>(numberOfLocations); + for (int i = 0; i < numberOfLocations; i++) { + if (exception && randomBoolean()) { + final Path location = createTempDir(); + final FileSystem fs = + new 
AccessDeniedWhileDeletingFileSystem(location.getFileSystem()).getFileSystem(URI.create("file:///")); + final Path wrapped = new FilterPath(location, fs); + locations[i] = wrapped.resolve(randomAlphaOfLength(8)); + Files.createDirectory(locations[i]); + locationsThrowingException.add(locations[i]); + } else { + // we create a tree of files that IOUtils#rm should delete + locations[i] = createTempDir(); + Path location = locations[i]; + while (true) { + location = Files.createDirectory(location.resolve(randomAlphaOfLength(8))); + if (rarely() == false) { + Files.createTempFile(location, randomAlphaOfLength(8), null); + break; + } + } + } + } + + if (locationsThrowingException.isEmpty()) { + IOUtils.rm(locations); + } else { + final IOException e = expectThrows(IOException.class, () -> IOUtils.rm(locations)); + assertThat(e, hasToString(containsString("could not remove the following files (in the order of attempts):"))); + for (final Path locationThrowingException : locationsThrowingException) { + assertThat(e, hasToString(containsString("access denied while trying to delete file [" + locationThrowingException + "]"))); + } + } + + for (int i = 0; i < numberOfLocations; i++) { + if (locationsThrowingException.contains(locations[i]) == false) { + assertFalse(locations[i].toString(), Files.exists(locations[i])); + } + } + } + + private static final class AccessDeniedWhileDeletingFileSystem extends FilterFileSystemProvider { + + /** + * Create a new instance, wrapping {@code delegate}. 
+ */ + AccessDeniedWhileDeletingFileSystem(final FileSystem delegate) { + super("access_denied://", delegate); + } + + @Override + public void delete(final Path path) throws IOException { + if (Files.exists(path)) { + throw new AccessDeniedException("access denied while trying to delete file [" + path + "]"); + } + super.delete(path); + } + + } + + public void testFsyncDirectory() throws Exception { + final Path path = createTempDir().toRealPath(); + final Path subPath = path.resolve(randomAlphaOfLength(8)); + Files.createDirectories(subPath); + IOUtils.fsync(subPath, true); + // no exception + } + + public void testFsyncFile() throws IOException { + final Path path = createTempDir().toRealPath(); + final Path subPath = path.resolve(randomAlphaOfLength(8)); + Files.createDirectories(subPath); + final Path file = subPath.resolve(randomAlphaOfLength(8)); + try (OutputStream o = Files.newOutputStream(file)) { + o.write("0\n".getBytes(StandardCharsets.US_ASCII)); + } + IOUtils.fsync(file, false); + // no exception + } + +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java index e29986a3c87de..d17b9e55ab0c1 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.util.Textifier; @@ -42,11 +41,10 @@ static String toString(Class iface, String source, CompilerSettings settings) try { new Compiler(iface, new Definition(Whitelist.BASE_WHITELISTS)) .compile("", source, settings, textifier); - } catch (Exception e) { + } catch (RuntimeException e) { textifier.print(outputWriter); e.addSuppressed(new Exception("current bytecode: \n" + output)); - IOUtils.reThrowUnchecked(e); - throw 
new AssertionError(); + throw e; } textifier.print(outputWriter); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index ac4dcfea3bf5a..ed38f4c511f59 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -20,7 +20,7 @@ package org.elasticsearch.painless; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.painless.Definition.Field; diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java index 83039ebd88319..1d88022b3e0e0 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -29,7 +29,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java index 50c5dac4b75cf..1617e4aebfa46 100644 --- 
a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java @@ -21,7 +21,7 @@ import com.amazonaws.util.json.Jackson; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.SuppressForbidden; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java index 1793c8154b0a8..92bd01dd9aec7 100755 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java @@ -19,7 +19,7 @@ package org.elasticsearch.discovery.ec2; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.NetworkService.CustomNameResolver; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index d26c15d457d9a..552925c0f386d 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -22,7 +22,7 @@ import com.google.api.client.http.HttpHeaders; import com.google.api.client.util.ClassInfo; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import 
org.apache.lucene.util.SetOnce; import org.elasticsearch.cloud.gce.GceInstancesService; import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java index f73d2ca13c14a..b420e8d0a1198 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java @@ -20,7 +20,7 @@ import com.maxmind.geoip2.DatabaseReader; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.logging.Loggers; diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 10a65d0274228..c9c742d178980 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -23,7 +23,7 @@ import com.maxmind.db.NodeCache; import com.maxmind.db.Reader; import com.maxmind.geoip2.DatabaseReader; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Setting; diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryF.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryF.java index bfa0621912724..f27ed4d99d032 100644 --- 
a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryF.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryF.java @@ -19,7 +19,7 @@ package org.elasticsearch.repositories.azure; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.MockNode; diff --git a/server/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java b/server/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java index 312b4b3dd0b34..3b418e90ef277 100644 --- a/server/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java +++ b/server/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java @@ -38,7 +38,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.OfflineSorter; @@ -59,6 +58,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.core.internal.io.IOUtils; import java.io.IOException; import java.io.InputStream; @@ -697,7 +697,11 @@ public void build(InputIterator iterator) throws IOException { } finally { IOUtils.closeWhileHandlingException(reader, writer); - IOUtils.deleteFilesIgnoringExceptions(tempDir, tempInput.getName(), tempSortedFileName); + try { + tempDir.deleteFile(tempInput.getName()); + } finally { + tempDir.deleteFile(tempSortedFileName); + } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 0b791cb78506a..0b05695c7f04b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -27,7 +27,7 @@ import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.support.ActionFilters; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index c035d4a737fb8..83641129906ae 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -26,7 +26,7 @@ import org.apache.logging.log4j.core.appender.ConsoleAppender; import org.apache.logging.log4j.core.config.Configurator; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.StringHelper; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java index 08731522a31a3..c0705d9f863ba 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java @@ -20,7 +20,7 @@ package org.elasticsearch.bootstrap; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.IOUtils; +import 
org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.PluginInfo; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java b/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java index 73814a4311af0..c6667bee4cd2b 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java @@ -28,7 +28,7 @@ import com.sun.jna.ptr.PointerByReference; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.logging.Loggers; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 6d422488bdb8f..5e89dc256d8b2 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -19,7 +19,7 @@ package org.elasticsearch.client.transport; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 3e50b4d74c916..5d31e74bef621 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -22,7 +22,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; 
import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index fbe941f5021fb..2255081522891 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -22,7 +22,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest; import org.elasticsearch.cluster.AckedClusterStateTaskListener; diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index 1e384109aebce..df2e7a123a3ae 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.blobstore.fs; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; diff --git 
a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java index 60055130fbe1d..29f3b2f7e15fa 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.blobstore.fs; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; diff --git a/server/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java b/server/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java index a976fe779db70..89abf956a0b0a 100644 --- a/server/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java +++ b/server/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java @@ -21,7 +21,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; diff --git a/server/src/main/java/org/elasticsearch/common/io/Streams.java b/server/src/main/java/org/elasticsearch/common/io/Streams.java index b6b9061ad7e49..cc4542c8390c1 100644 --- a/server/src/main/java/org/elasticsearch/common/io/Streams.java +++ b/server/src/main/java/org/elasticsearch/common/io/Streams.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.io; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStream; import org.elasticsearch.common.io.stream.StreamOutput; diff --git 
a/server/src/main/java/org/elasticsearch/common/lease/Releasables.java b/server/src/main/java/org/elasticsearch/common/lease/Releasables.java index bd7b2a6e772a4..6c928b29a841f 100644 --- a/server/src/main/java/org/elasticsearch/common/lease/Releasables.java +++ b/server/src/main/java/org/elasticsearch/common/lease/Releasables.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.lease; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import java.io.IOException; import java.io.UncheckedIOException; diff --git a/server/src/main/java/org/elasticsearch/common/settings/Settings.java b/server/src/main/java/org/elasticsearch/common/settings/Settings.java index 3d9ba8a2f9ca7..7611135d1ffaf 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -20,7 +20,7 @@ package org.elasticsearch.common.settings; import org.apache.logging.log4j.Level; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; diff --git a/server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java b/server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java index 528982385ac54..3ee7d1f23add2 100644 --- a/server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java +++ b/server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java @@ -22,7 +22,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.logging.Loggers; import 
org.elasticsearch.common.settings.Settings; diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java b/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java index 70461e07ea151..14bb21e8243bf 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentLocation; diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java index 95de654928ea0..13bcf1f15f56a 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.discovery.zen; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index afd3329845952..312c954cf6484 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -24,7 +24,7 @@ import 
org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index e079e827cd1f0..066299d07fbd1 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -22,7 +22,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 591a51a62092b..326393ac84ee3 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -30,7 +30,7 @@ import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.store.NativeFSLockFactory; import org.apache.lucene.store.SimpleFSDirectory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java 
b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 1faa37c6a33a9..0ac421b699faa 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -30,7 +30,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.OutputStreamIndexOutput; import org.apache.lucene.store.SimpleFSDirectory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index df71471a3caae..135eb15942f8a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -24,7 +24,7 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index ec8d17929a31f..2920e9ae519a1 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -20,7 +20,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java index f3200d606fb45..7aeb9603b5c62 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.index.analysis; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; diff --git a/server/src/main/java/org/elasticsearch/index/cache/IndexCache.java b/server/src/main/java/org/elasticsearch/index/cache/IndexCache.java index 3e853076ca4de..65a5fd9c56844 100644 --- a/server/src/main/java/org/elasticsearch/index/cache/IndexCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/IndexCache.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.cache; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index dc9277a76e351..233644a5a8fc2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -43,7 +43,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import 
org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.InfoStream; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 8706de9fde3a9..5cbd4d43ae6e0 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -42,13 +42,11 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.lucene.all.AllField; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.mapper.AllFieldMapper; -import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index c8330018b3e9f..232b3d92cce59 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -37,7 +37,7 @@ import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.Assertions; import 
org.elasticsearch.ElasticsearchException; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java index f4aa0f699c86c..b5d1fe9c998ae 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.Assertions; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -30,6 +29,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java b/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java index 4a18bcdd5c79a..99c1e96cfb793 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardPath.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.NodeEnvironment; diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java index f49459a280b06..876f5f948c851 100644 --- 
a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.index.snapshots.blobstore; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import java.io.IOException; import java.io.InputStream; diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 02713348b86b8..7b73a945d6e31 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -47,7 +47,7 @@ import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.Version; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index cbcba3fffbeb7..46282d973801e 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -23,7 +23,7 @@ import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.index.Term; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.UUIDs; diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 
1eed393208c9c..a1e7e18801445 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -23,7 +23,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.OutputStreamDataOutput; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Assertions; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; diff --git a/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java b/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java index 222e3e13d65e1..164d8fee956dd 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java @@ -34,7 +34,7 @@ import org.apache.lucene.store.NativeFSLockFactory; import org.apache.lucene.store.OutputStreamDataOutput; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cli.EnvironmentAwareCommand; import org.elasticsearch.cli.Terminal; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 15ab826445747..f62ee380145bb 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -25,7 +25,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.IOUtils; +import 
org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceAlreadyExistsException; diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index a324e8282a15a..1712f90c206ec 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -23,7 +23,7 @@ import org.apache.lucene.analysis.hunspell.Dictionary; import org.apache.lucene.store.Directory; import org.apache.lucene.store.SimpleFSDirectory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.FileSystemUtils; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 1b174f8ebb6fb..42b28506c0506 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -29,7 +29,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.RateLimiter; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 0cc11168a96d4..429bf0af94811 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ 
b/server/src/main/java/org/elasticsearch/node/Node.java @@ -21,7 +21,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; diff --git a/server/src/main/java/org/elasticsearch/node/NodeService.java b/server/src/main/java/org/elasticsearch/node/NodeService.java index 11106d41beaf1..0e19b5a650221 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeService.java +++ b/server/src/main/java/org/elasticsearch/node/NodeService.java @@ -19,7 +19,7 @@ package org.elasticsearch.node; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginSecurity.java b/server/src/main/java/org/elasticsearch/plugins/PluginSecurity.java index 5a1318c5c46d3..0b8ebde0b32c4 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginSecurity.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginSecurity.java @@ -19,7 +19,7 @@ package org.elasticsearch.plugins; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.Terminal.Verbosity; diff --git a/server/src/main/java/org/elasticsearch/script/ScriptService.java b/server/src/main/java/org/elasticsearch/script/ScriptService.java index b6ab8a0209d73..9fff02b611f7b 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptService.java @@ -19,7 +19,7 @@ package org.elasticsearch.script; -import org.apache.lucene.util.IOUtils; +import 
org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 4caa5d7967f97..a15997131a439 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -21,7 +21,7 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -162,7 +162,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private volatile TimeValue defaultSearchTimeout; private volatile boolean defaultAllowPartialSearchResults; - + private volatile boolean lowLevelCancellation; private final Cancellable keepAliveReaper; @@ -199,10 +199,10 @@ public SearchService(ClusterService clusterService, IndicesService indicesServic clusterService.getClusterSettings().addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout); defaultAllowPartialSearchResults = DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.get(settings); - clusterService.getClusterSettings().addSettingsUpdateConsumer(DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS, + clusterService.getClusterSettings().addSettingsUpdateConsumer(DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS, this::setDefaultAllowPartialSearchResults); - - + + lowLevelCancellation = LOW_LEVEL_CANCELLATION_SETTING.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(LOW_LEVEL_CANCELLATION_SETTING, this::setLowLevelCancellation); } @@ -228,11 +228,11 @@ private void 
setDefaultSearchTimeout(TimeValue defaultSearchTimeout) { private void setDefaultAllowPartialSearchResults(boolean defaultAllowPartialSearchResults) { this.defaultAllowPartialSearchResults = defaultAllowPartialSearchResults; } - + public boolean defaultAllowPartialSearchResults() { return defaultAllowPartialSearchResults; - } - + } + private void setLowLevelCancellation(Boolean lowLevelCancellation) { this.lowLevelCancellation = lowLevelCancellation; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 4d6fceba869ff..678b00aa13dca 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -35,7 +35,7 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import java.io.CharArrayReader; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index f661095d6bd47..0c6c22671e8dc 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -20,7 +20,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; @@ -49,6 +49,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import 
java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.Map; /** @@ -182,7 +183,7 @@ public String taskResultIndexMapping() { try (InputStream is = getClass().getResourceAsStream(TASK_RESULT_INDEX_MAPPING_FILE)) { ByteArrayOutputStream out = new ByteArrayOutputStream(); Streams.copy(is, out); - return out.toString(IOUtils.UTF_8); + return out.toString(StandardCharsets.UTF_8.name()); } catch (Exception e) { logger.error( (Supplier) () -> new ParameterizedMessage( diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index e179650aeef03..6abdc309e21b0 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -22,7 +22,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.Counter; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; diff --git a/server/src/main/java/org/elasticsearch/transport/CompressibleBytesOutputStream.java b/server/src/main/java/org/elasticsearch/transport/CompressibleBytesOutputStream.java index 4b4923ab1f817..a90256806f11b 100644 --- a/server/src/main/java/org/elasticsearch/transport/CompressibleBytesOutputStream.java +++ b/server/src/main/java/org/elasticsearch/transport/CompressibleBytesOutputStream.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.Streams; diff --git 
a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index e73debc601430..aa4dec48b46bd 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -21,7 +21,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 8f3a24cd70bfc..b253d9d23df4e 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.transport; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index dd2346443a6cd..c066cbc1136bc 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -22,7 +22,7 @@ import com.carrotsearch.hppc.IntSet; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.util.IOUtils; +import 
org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 5af0ba5eedc72..577b299944662 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -21,7 +21,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.cluster.ClusterName; diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 15d7f6d7c5992..d2a51070c9298 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.action.support.replication; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; diff --git a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java index aa8b5d092fafa..6b336fdf2b78c 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java +++ 
b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.bootstrap; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.settings.KeyStoreCommandTestCase; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureSettings; diff --git a/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 0522f3f15f817..7735fe4b241cc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -21,7 +21,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index 1f8de1ca02fd7..265e17e68818f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.cluster.routing; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterState; diff --git 
a/server/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java b/server/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java index 397dd28460492..6e3bd25f13ba9 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java @@ -28,13 +28,13 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; /** Simple tests for this filterreader */ public class ESDirectoryReaderTests extends ESTestCase { - + /** Test that core cache key (needed for NRT) is working */ public void testCoreCacheKey() throws Exception { Directory dir = newDirectory(); @@ -42,7 +42,7 @@ public void testCoreCacheKey() throws Exception { iwc.setMaxBufferedDocs(100); iwc.setMergePolicy(NoMergePolicy.INSTANCE); IndexWriter iw = new IndexWriter(dir, iwc); - + // add two docs, id:0 and id:1 Document doc = new Document(); Field idField = new StringField("id", "", Field.Store.NO); @@ -51,7 +51,7 @@ public void testCoreCacheKey() throws Exception { iw.addDocument(doc); idField.setStringValue("1"); iw.addDocument(doc); - + // open reader ShardId shardId = new ShardId("fake", "_na_", 1); DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw), shardId); @@ -61,7 +61,7 @@ public void testCoreCacheKey() throws Exception { // delete id:0 and reopen iw.deleteDocuments(new Term("id", "0")); DirectoryReader ir2 = DirectoryReader.openIfChanged(ir); - + // we should have the same cache key as before assertEquals(1, ir2.numDocs()); assertEquals(1, ir2.leaves().size()); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java 
b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 7c2a2f52b6017..91cf25f53dadd 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.test.ESTestCase; diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreCommandTestCase.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreCommandTestCase.java index 53dbc8589d8d4..7f8c71889e038 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreCommandTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreCommandTestCase.java @@ -29,7 +29,7 @@ import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.CommandTestCase; import org.elasticsearch.common.io.PathUtilsForTesting; diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java index 9414931f996e1..a59cdf13c13ff 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java @@ -41,7 +41,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.SimpleFSDirectory; -import 
org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.bootstrap.BootstrapSettings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; diff --git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 8c2d84cd8c89d..18829d515973d 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.discovery; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index 2e685ab555dc2..9527afed5fe03 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.discovery.single; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java index f5dabf705fdd1..23a510a257f21 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java @@ -19,7 +19,7 @@ package 
org.elasticsearch.discovery.single; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 44914b1958777..9698ab18c198b 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.discovery.zen; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index d2068944ab182..0ecb5a296f570 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.discovery.zen; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterChangedEvent; diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 40193ebc363e5..39f03fefe4e65 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ 
b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.env; import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.SuppressForbidden; diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index c7d396c778a3d..706421c5ce73a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -30,7 +30,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.SetOnce.AlreadySetException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index e82ed61fbed68..1f2526b2e2829 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -38,7 +38,7 @@ import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; diff --git 
a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 96226507978b0..ba25694ec0470 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -65,7 +65,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java index 19972e389b220..c5138b371d14a 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java @@ -26,7 +26,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 43136b67e8ccf..ad9d0c414946b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -29,7 +29,7 @@ import 
org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.joda.DateMathParser; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 6d5ca1add74d5..3ffe48fe70af6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java index 9cb6cf6179689..1fdfb52b47f67 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.UUIDs; diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java 
b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 4fb5a3e82c67e..dcfa2cb34a2db 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -22,7 +22,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.index.IndexRequest; diff --git a/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java b/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java index 3fc62673de0ce..70813531aeb0e 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java @@ -17,7 +17,7 @@ package org.elasticsearch.index.seqno; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index f5d4f048c27f5..4479c7b390954 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index f07016d55fd43..622a9b1acc363 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -21,7 +21,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.store.LockObtainFailedException; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.stats.IndexStats; diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 928f0b3f2f244..2afd12a32cc42 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -29,7 +29,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java 
b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 166d369281911..25b307e7d300f 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -26,7 +26,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java index 34c1789824ea6..7e9c7b901a212 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java @@ -26,7 +26,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.store.BaseDirectoryWrapper; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.test.ESTestCase; diff --git a/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java b/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java index 61d6a5a845bf8..7ac02874494d4 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java @@ -39,7 +39,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import 
org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.OperationRouting; diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 59189a14af850..392227396de15 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -48,7 +48,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.Version; import org.elasticsearch.ExceptionsHelper; diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogDeletionPolicyTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogDeletionPolicyTests.java index 0779ba0f5a7e7..2f6f4ee3178f2 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogDeletionPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogDeletionPolicyTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.translog; import org.apache.lucene.store.ByteArrayDataOutput; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.Tuple; diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 98f28e87e6c1a..7547dfc513d8e 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -31,7 +31,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.MockDirectoryWrapper; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index d8af4ad00e066..83bde66e3bd21 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.cache.query.QueryCacheStats; diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index e78a44c88d371..9a2a5d1eacd75 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -29,7 +29,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import 
org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 9d0008dce5185..68d789d91c2ab 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -32,7 +32,7 @@ import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index d5ff8175edf81..4c079b545c28f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.repositories.blobstore; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.routing.ShardRouting; diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index 0b658d95a0f2e..eba4a03e72cfa 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -27,7 +27,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; import org.elasticsearch.search.query.CancellableCollector; import org.elasticsearch.tasks.TaskCancelledException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java index 67bec2acf7a42..dc0e3c5a8e45f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index e887cb48585e0..5e10292fa3e7c 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -40,7 +40,7 @@ import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import 
org.apache.lucene.util.TestUtil; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.internal.ContextIndexSearcher; diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index b8448890b66fc..922bfd0ef8dad 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.snapshots; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 05281721e3125..57039f8596671 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 91a245dc5491d..e221a2fb2077e 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.transport; -import 
org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index 64f4182550935..3f4ae7bdd2d76 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.transport; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; diff --git a/server/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java b/server/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java index a13bf2e122d6c..54658b670884f 100644 --- a/server/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java +++ b/server/src/test/java/org/elasticsearch/watcher/FileWatcherTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.watcher; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.test.ESTestCase; @@ -391,4 +391,4 @@ static void append(String string, Path path, Charset cs) throws IOException { writer.append(string); } } -} \ No newline at end of file +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 62be0d48bf31c..117dfe430c891 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -35,7 +35,7 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 290444cd4eff0..39c720a564c67 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -25,7 +25,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index de4979094b491..4887f2716e85f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -25,7 +25,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; diff 
--git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 26e733c3777c7..28f05b6b92604 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -26,7 +26,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.http.HttpHost; import org.apache.lucene.search.Sort; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index d6c4942ab6084..d127f1a6b3631 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -19,7 +19,7 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedContext; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index c53364ea27993..d82b5052dbf54 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -26,7 +26,7 @@ import 
com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 0d07f18bac0cd..ee9b8b3360ada 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -30,7 +30,7 @@ import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index c0c171e9bca70..fa03d4077045e 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -23,7 +23,7 @@ import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 68f79b1cef779..2202d7a7647df 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.transport; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 25c96da81fa16..05fdfac541a2e 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -19,7 +19,7 @@ */ package org.elasticsearch.test.test; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/channel/ChannelFactoryTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/channel/ChannelFactoryTests.java index 91e1c2023e74c..2f4c619dcb237 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/channel/ChannelFactoryTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/channel/ChannelFactoryTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport.nio.channel; -import org.apache.lucene.util.IOUtils; +import 
org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.nio.AcceptingSelector; import org.elasticsearch.transport.nio.SocketSelector; From 8617d5fd2a083267d294687f77a6fd8f094c8767 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Tue, 13 Mar 2018 12:31:02 -0700 Subject: [PATCH 45/89] Correct the way to reference params in painless --- docs/reference/search/request/script-fields.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc index 24e9c2a017fcb..55623faf2684c 100644 --- a/docs/reference/search/request/script-fields.asciidoc +++ b/docs/reference/search/request/script-fields.asciidoc @@ -21,7 +21,7 @@ GET /_search "test2" : { "script" : { "lang": "painless", - "source": "doc['my_field_name'].value * factor", + "source": "doc['my_field_name'].value * params.factor", "params" : { "factor" : 2.0 } From b81c838dbf02eacfe9e573b7677e1c51cbec1d28 Mon Sep 17 00:00:00 2001 From: Paul Sanwald Date: Tue, 13 Mar 2018 12:58:30 -0700 Subject: [PATCH 46/89] Document and test date_range "missing" support (#28983) * Add a REST integration test that documents date_range support Add a test case that exercises date_range aggregations using the missing option. Addresses #17597 * Test cleanup and correction Adding a document with a null date to exercise `missing` option, update test name to something reasonable. * Update documentation to explain how the "missing" parameter works for date_range aggregations. * Wrap lines at 80 chars in docs. * Change format of test to YAML for readability. 
--- .../bucket/daterange-aggregation.asciidoc | 93 +++++++++++++++---- .../test/search.aggregation/40_range.yml | 74 +++++++++++++++ 2 files changed, 150 insertions(+), 17 deletions(-) diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc index 42c64f23cd335..4b172402da9ec 100644 --- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc @@ -1,8 +1,14 @@ [[search-aggregations-bucket-daterange-aggregation]] === Date Range Aggregation -A range aggregation that is dedicated for date values. The main difference between this aggregation and the normal <> aggregation is that the `from` and `to` values can be expressed in <> expressions, and it is also possible to specify a date format by which the `from` and `to` response fields will be returned. -Note that this aggregation includes the `from` value and excludes the `to` value for each range. +A range aggregation that is dedicated for date values. The main difference +between this aggregation and the normal +<> +aggregation is that the `from` and `to` values can be expressed in +<> expressions, and it is also possible to specify a date +format by which the `from` and `to` response fields will be returned. +Note that this aggregation includes the `from` value and excludes the `to` value +for each range. Example: @@ -30,8 +36,9 @@ POST /sales/_search?size=0 <1> < now minus 10 months, rounded down to the start of the month. <2> >= now minus 10 months, rounded down to the start of the month. 
-In the example above, we created two range buckets, the first will "bucket" all documents dated prior to 10 months ago and -the second will "bucket" all documents dated since 10 months ago +In the example above, we created two range buckets, the first will "bucket" all +documents dated prior to 10 months ago and the second will "bucket" all +documents dated since 10 months ago Response: @@ -61,12 +68,52 @@ Response: -------------------------------------------------- // TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] +==== Missing Values + +The `missing` parameter defines how documents that are missing a value should +be treated. By default they will be ignored but it is also possible to treat +them as if they had a value. This is done by adding a set of fieldname : +value mappings to specify default values per field. + +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs": { + "range": { + "date_range": { + "field": "date", + "missing": "1976/11/30", + "ranges": [ + { + "key": "Older", + "to": "2016/02/01" + }, <1> + { + "key": "Newer", + "from": "2016/02/01", + "to" : "now/d" + } + ] + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +<1> Documents without a value in the `date` field will be added to the "Older" +bucket, as if they had a date value of "1976/11/30". 
+ [[date-format-pattern]] ==== Date Format/Pattern -NOTE: this information was copied from http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[JodaDate] +NOTE: this information was copied from +http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[JodaDate] -All ASCII letters are reserved as format pattern letters, which are defined as follows: +All ASCII letters are reserved as format pattern letters, which are defined +as follows: [options="header"] |======= @@ -104,30 +151,41 @@ All ASCII letters are reserved as format pattern letters, which are defined as f The count of pattern letters determine the format. -Text:: If the number of pattern letters is 4 or more, the full form is used; otherwise a short or abbreviated form is used if available. +Text:: If the number of pattern letters is 4 or more, the full form is used; +otherwise a short or abbreviated form is used if available. -Number:: The minimum number of digits. Shorter numbers are zero-padded to this amount. +Number:: The minimum number of digits. Shorter numbers are zero-padded to +this amount. -Year:: Numeric presentation for year and weekyear fields are handled specially. For example, if the count of 'y' is 2, the year will be displayed as the zero-based year of the century, which is two digits. +Year:: Numeric presentation for year and weekyear fields are handled +specially. For example, if the count of 'y' is 2, the year will be displayed +as the zero-based year of the century, which is two digits. Month:: 3 or over, use text, otherwise use number. -Zone:: 'Z' outputs offset without a colon, 'ZZ' outputs the offset with a colon, 'ZZZ' or more outputs the zone id. +Zone:: 'Z' outputs offset without a colon, 'ZZ' outputs the offset with a +colon, 'ZZZ' or more outputs the zone id. Zone names:: Time zone names ('z') cannot be parsed. -Any characters in the pattern that are not in the ranges of ['a'..'z'] and ['A'..'Z'] will be treated as quoted text. 
For instance, characters like ':', '.', ' ', '#' and '?' will appear in the resulting time text even they are not embraced within single quotes. +Any characters in the pattern that are not in the ranges of ['a'..'z'] and +['A'..'Z'] will be treated as quoted text. For instance, characters like ':', + '.', ' ', '#' and '?' will appear in the resulting time text even they are + not embraced within single quotes. [[time-zones]] ==== Time zone in date range aggregations -Dates can be converted from another time zone to UTC by specifying the `time_zone` parameter. +Dates can be converted from another time zone to UTC by specifying the +`time_zone` parameter. -Time zones may either be specified as an ISO 8601 UTC offset (e.g. +01:00 or -08:00) or as one of -the http://www.joda.org/joda-time/timezones.html[time zone ids] from the TZ database. +Time zones may either be specified as an ISO 8601 UTC offset (e.g. +01:00 or +-08:00) or as one of the http://www.joda.org/joda-time/timezones.html[time +zone ids] from the TZ database. -The `time_zone` parameter is also applied to rounding in date math expressions. As an example, -to round to the beginning of the day in the CET time zone, you can do the following: +The `time_zone` parameter is also applied to rounding in date math expressions. 
+As an example, to round to the beginning of the day in the CET time zone, you +can do the following: [source,js] -------------------------------------------------- @@ -156,7 +214,8 @@ POST /sales/_search?size=0 ==== Keyed Response -Setting the `keyed` flag to `true` will associate a unique string key with each bucket and return the ranges as a hash rather than an array: +Setting the `keyed` flag to `true` will associate a unique string key with each +bucket and return the ranges as a hash rather than an array: [source,js] -------------------------------------------------- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml index fd8a016976d62..366243c78ee7b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml @@ -273,3 +273,77 @@ setup: - match: { aggregations.date_range.buckets.1.from: 3000000 } - match: { aggregations.date_range.buckets.1.to: 4000000 } +--- +"Date Range Missing": + - do: + index: + index: test + type: test + id: 1 + body: { "date" : "28800000000" } + + - do: + index: + index: test + type: test + id: 2 + body: { "date" : "315561600000" } + + - do: + index: + index: test + type: test + id: 3 + body: { "date" : "631180800000" } + + - do: + index: + index: test + type: test + id: 4 + body: { "date" : "-2524492800000" } + + - do: + index: + index: test + type: test + id: 5 + body: { "ip" : "192.168.0.1" } + + - do: + indices.refresh: {} + + - do: + search: + body: + aggs: + age_groups: + date_range: + field: date + missing: "-2240496000000" + ranges: + - key: Generation Y + from: '315561600000' + to: '946713600000' + - key: Generation X + from: "-157737600000" + to: '315561600000' + - key: Other + to: "-2208960000000" + + - match: { hits.total: 5 } + + - length: { 
aggregations.age_groups.buckets: 3 } + + - match: { aggregations.age_groups.buckets.0.key: "Other" } + + - match: { aggregations.age_groups.buckets.0.doc_count: 2 } + + - match: { aggregations.age_groups.buckets.1.key: "Generation X" } + + - match: { aggregations.age_groups.buckets.1.doc_count: 1 } + + - match: { aggregations.age_groups.buckets.2.key: "Generation Y" } + + - match: { aggregations.age_groups.buckets.2.doc_count: 2 } + From e5d768fd04e683d2a5ee9e8b356bfb3530ed9670 Mon Sep 17 00:00:00 2001 From: Robin Neatherway Date: Tue, 13 Mar 2018 20:16:48 +0000 Subject: [PATCH 47/89] Painless: Correct ClassToName string conversion (#28997) A typo of 'dimensions' rather than 'dimension' caused an infinite loop. --- .../src/main/java/org/elasticsearch/painless/Definition.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 6e37e5be0bb0d..95032acabef9b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -565,7 +565,7 @@ public static String ClassToName(Class clazz) { if (component == def.class) { StringBuilder builder = new StringBuilder(def.class.getSimpleName()); - for (int dimension = 0; dimension < dimensions; dimensions++) { + for (int dimension = 0; dimension < dimensions; dimension++) { builder.append("[]"); } From a032337e1da0c88a87f23bafea5ecdbc95e10b17 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 16:30:13 -0400 Subject: [PATCH 48/89] Remove interning from prefix logger (#29031) This interning is completely unnecessary because we look up the marker by the prefix (value, not identity) anyway. This means that regardless of the identity of the prefix, we end up with the same marker. That is all that we really care about here. 
--- .../java/org/elasticsearch/common/logging/PrefixLogger.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java b/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java index a78330c3e8564..f46d360a3fa5b 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java +++ b/server/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java @@ -76,7 +76,7 @@ public String prefix() { PrefixLogger(final ExtendedLogger logger, final String name, final String prefix) { super(logger, name, null); - final String actualPrefix = (prefix == null ? "" : prefix).intern(); + final String actualPrefix = (prefix == null ? "" : prefix); final Marker actualMarker; // markers is not thread-safe, so we synchronize access synchronized (markers) { @@ -88,6 +88,7 @@ public String prefix() { * those references are held strongly, this would give a strong reference back to the key preventing them from ever being * collected. This also guarantees that no other strong reference can be held to the prefix anywhere. */ + // noinspection RedundantStringConstructorCall markers.put(new String(actualPrefix), actualMarker); } else { actualMarker = maybeMarker; From 2b52043be79347828d40a1a4ed0901cd471c4215 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 16:31:19 -0400 Subject: [PATCH 49/89] Log template creation and deletion (#29027) These can be seen at the debug level via cluster state update logging but really they should be more visible like index creation and deletion. This commit adds info-level logging for template puts and deletes. 
--- .../cluster/metadata/MetaDataIndexTemplateService.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 9d8da37cbeeba..b046e2e9cc6b8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -117,6 +117,7 @@ public ClusterState execute(ClusterState currentState) { } MetaData.Builder metaData = MetaData.builder(currentState.metaData()); for (String templateName : templateNames) { + logger.info("removing template [{}]", templateName); metaData.removeTemplate(templateName); } return ClusterState.builder(currentState).metaData(metaData).build(); @@ -185,6 +186,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { MetaData.Builder builder = MetaData.builder(currentState.metaData()).put(template); + logger.info("adding template [{}] for index patterns {}", request.name, request.indexPatterns); return ClusterState.builder(currentState).metaData(builder).build(); } From ca10abc4a8bb172b25c7331cdaff15f561d6df15 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 16:32:16 -0400 Subject: [PATCH 50/89] Put JVM crash logs in the default log directory (#29028) This commit adds a JVM flag to ensure that the JVM fatal error logs land in the default log directory. Users that wish to use an alternative location should change the path configured here. 
Now, both nodes will be on a version that considers foo.bar to be unknown or invalid, yet this setting will still be contained in the cluster state.
In such a state, the offending setting cannot even be removed.
persistentToApply) { + synchronized ClusterState updateSettings( + final ClusterState currentState, final Settings transientToApply, final Settings persistentToApply, final Logger logger) { boolean changed = false; - Settings.Builder transientSettings = Settings.builder(); - transientSettings.put(currentState.metaData().transientSettings()); - changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient"); + /* + * Our cluster state could have unknown or invalid settings that are known and valid in a previous version of Elasticsearch. We can + * end up in this situation during a rolling upgrade where the previous version will infect the current version of Elasticsearch + * with settings that the current version either no longer knows about or now considers to have invalid values. When the current + * version of Elasticsearch becomes infected with a cluster state containing such settings, we need to skip validating such settings + * and instead archive them. 
Consequently, for the current transient and persistent settings in the cluster state we do the + * following: + * - split existing settings instance into two with the known and valid settings in one, and the unknown or invalid in another + * (note that existing archived settings are included in the known and valid settings) + * - validate the incoming settings update combined with the existing known and valid settings + * - merge in the archived unknown or invalid settings + */ + final Tuple partitionedTransientSettings = + partitionKnownAndValidSettings(currentState.metaData().transientSettings(), "transient", logger); + final Settings knownAndValidTransientSettings = partitionedTransientSettings.v1(); + final Settings unknownOrInvalidTransientSettings = partitionedTransientSettings.v2(); + final Settings.Builder transientSettings = Settings.builder().put(knownAndValidTransientSettings); + changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient"); - Settings.Builder persistentSettings = Settings.builder(); - persistentSettings.put(currentState.metaData().persistentSettings()); + final Tuple partitionedPersistentSettings = + partitionKnownAndValidSettings(currentState.metaData().persistentSettings(), "persistent", logger); + final Settings knownAndValidPersistentSettings = partitionedPersistentSettings.v1(); + final Settings unknownOrInvalidPersistentSettings = partitionedPersistentSettings.v2(); + final Settings.Builder persistentSettings = Settings.builder().put(knownAndValidPersistentSettings); changed |= clusterSettings.updateDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent"); final ClusterState clusterState; @@ -69,8 +95,8 @@ synchronized ClusterState updateSettings(final ClusterState currentState, Settin clusterSettings.validate(persistentFinalSettings, true); MetaData.Builder metaData = MetaData.builder(currentState.metaData()) - 
.persistentSettings(persistentFinalSettings) - .transientSettings(transientFinalSettings); + .transientSettings(Settings.builder().put(transientFinalSettings).put(unknownOrInvalidTransientSettings).build()) + .persistentSettings(Settings.builder().put(persistentFinalSettings).put(unknownOrInvalidPersistentSettings).build()); ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) @@ -102,5 +128,46 @@ synchronized ClusterState updateSettings(final ClusterState currentState, Settin return clusterState; } + /** + * Partitions the settings into those that are known and valid versus those that are unknown or invalid. The resulting tuple contains + * the known and valid settings in the first component and the unknown or invalid settings in the second component. Note that archived + * settings contained in the settings to partition are included in the first component. + * + * @param settings the settings to partition + * @param settingsType a string to identify the settings (for logging) + * @param logger a logger to sending warnings to + * @return the partitioned settings + */ + private Tuple partitionKnownAndValidSettings( + final Settings settings, final String settingsType, final Logger logger) { + final Settings existingArchivedSettings = settings.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX)); + final Settings settingsExcludingExistingArchivedSettings = + settings.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX) == false); + final Settings settingsWithUnknownOrInvalidArchived = clusterSettings.archiveUnknownOrInvalidSettings( + settingsExcludingExistingArchivedSettings, + e -> logUnknownSetting(settingsType, e, logger), + (e, ex) -> logInvalidSetting(settingsType, e, ex, logger)); + return Tuple.tuple( + Settings.builder() + .put(settingsWithUnknownOrInvalidArchived.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX) == false)) + 
.put(existingArchivedSettings) + .build(), + settingsWithUnknownOrInvalidArchived.filter(k -> k.startsWith(ARCHIVED_SETTINGS_PREFIX))); + } + + private void logUnknownSetting(final String settingType, final Map.Entry e, final Logger logger) { + logger.warn("ignoring existing unknown {} setting: [{}] with value [{}]; archiving", settingType, e.getKey(), e.getValue()); + } + + private void logInvalidSetting( + final String settingType, final Map.Entry e, final IllegalArgumentException ex, final Logger logger) { + logger.warn( + (Supplier) + () -> new ParameterizedMessage("ignoring existing invalid {} setting: [{}] with value [{}]; archiving", + settingType, + e.getKey(), + e.getValue()), + ex); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index dae55b2fc048a..edc30bd3c35fd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -180,7 +180,8 @@ public void onFailure(String source, Exception e) { @Override public ClusterState execute(final ClusterState currentState) { - ClusterState clusterState = updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings()); + ClusterState clusterState = + updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings(), logger); changed = clusterState != currentState; return clusterState; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java index 19dd64e6324ca..d582141898684 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java @@ -28,11 +28,20 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; +import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.common.settings.AbstractScopedSettings.ARCHIVED_SETTINGS_PREFIX; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; + public class SettingsUpdaterTests extends ESTestCase { @@ -51,7 +60,7 @@ public void testUpdateSetting() { .put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 4.5).build()); ClusterState build = builder.metaData(metaData).build(); ClusterState clusterState = updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.5).build(), - Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.4).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 0.4).build(), logger); assertNotSame(clusterState, build); assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); @@ -59,14 +68,14 @@ public void testUpdateSetting() { assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().transientSettings()), 4.5, 0.1); clusterState = updater.updateSettings(clusterState, 
Settings.builder().putNull("cluster.routing.*").build(), - Settings.EMPTY); + Settings.EMPTY, logger); assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 0.4, 0.1); assertEquals(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 2.5, 0.1); assertFalse(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().transientSettings())); clusterState = updater.updateSettings(clusterState, - Settings.EMPTY, Settings.builder().putNull("cluster.routing.*").put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 10.0).build()); + Settings.EMPTY, Settings.builder().putNull("cluster.routing.*").put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 10.0).build(), logger); assertEquals(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.get(clusterState.metaData().persistentSettings()), 10.0, 0.1); assertFalse(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.exists(clusterState.metaData().persistentSettings())); @@ -93,7 +102,7 @@ public void testAllOrNothing() { try { updater.updateSettings(build, Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").build(), - Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), "not a float").put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build(), logger); fail("all or nothing"); } catch (IllegalArgumentException ex) { logger.info("", ex); @@ -119,21 +128,21 @@ public void testClusterBlock() { ClusterState build = builder.metaData(metaData).build(); ClusterState 
clusterState = updater.updateSettings(build, Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), true).build(), - Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build(), logger); assertEquals(clusterState.blocks().global().size(), 1); assertEquals(clusterState.blocks().global().iterator().next(), MetaData.CLUSTER_READ_ONLY_BLOCK); clusterState = updater.updateSettings(build, Settings.EMPTY, - Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build()); + Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), false).build(), logger); assertEquals(clusterState.blocks().global().size(), 0); clusterState = updater.updateSettings(build, Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true).build(), - Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build()); + Settings.builder().put(BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING.getKey(), 1.6).put(BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING.getKey(), 1.0f).build(), logger); assertEquals(clusterState.blocks().global().size(), 1); assertEquals(clusterState.blocks().global().iterator().next(), MetaData.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK); clusterState = updater.updateSettings(build, Settings.EMPTY, - Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), false).build()); + Settings.builder().put(MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), false).build(), logger); assertEquals(clusterState.blocks().global().size(), 0); } @@ -151,16 +160,317 @@ public void testDeprecationLogging() { 
ClusterState.builder(new ClusterName("foo")).metaData(MetaData.builder().persistentSettings(settings).build()).build(); final Settings toApplyDebug = Settings.builder().put("logger.org.elasticsearch", "debug").build(); - final ClusterState afterDebug = settingsUpdater.updateSettings(clusterState, toApplyDebug, Settings.EMPTY); + final ClusterState afterDebug = settingsUpdater.updateSettings(clusterState, toApplyDebug, Settings.EMPTY, logger); assertSettingDeprecationsAndWarnings(new Setting[] { deprecatedSetting }); final Settings toApplyUnset = Settings.builder().putNull("logger.org.elasticsearch").build(); - final ClusterState afterUnset = settingsUpdater.updateSettings(afterDebug, toApplyUnset, Settings.EMPTY); + final ClusterState afterUnset = settingsUpdater.updateSettings(afterDebug, toApplyUnset, Settings.EMPTY, logger); assertSettingDeprecationsAndWarnings(new Setting[] { deprecatedSetting }); // we also check that if no settings are changed, deprecation logging still occurs - settingsUpdater.updateSettings(afterUnset, toApplyUnset, Settings.EMPTY); + settingsUpdater.updateSettings(afterUnset, toApplyUnset, Settings.EMPTY, logger); assertSettingDeprecationsAndWarnings(new Setting[] { deprecatedSetting }); } + public void testUpdateWithUnknownAndSettings() { + // we will randomly apply some new dynamic persistent and transient settings + final int numberOfDynamicSettings = randomIntBetween(1, 8); + final List> dynamicSettings = new ArrayList<>(numberOfDynamicSettings); + for (int i = 0; i < numberOfDynamicSettings; i++) { + final Setting dynamicSetting = Setting.simpleString("dynamic.setting" + i, Property.Dynamic, Property.NodeScope); + dynamicSettings.add(dynamicSetting); + } + + // these are invalid settings that exist as either persistent or transient settings + final int numberOfInvalidSettings = randomIntBetween(0, 7); + final List> invalidSettings = new ArrayList<>(numberOfInvalidSettings); + for (int i = 0; i < numberOfInvalidSettings; i++) { + final 
Setting invalidSetting = Setting.simpleString( + "invalid.setting" + i, + (value, settings) -> { + throw new IllegalArgumentException("invalid"); + }, + Property.NodeScope); + invalidSettings.add(invalidSetting); + } + + // these are unknown settings that exist as either persistent or transient settings + final int numberOfUnknownSettings = randomIntBetween(0, 7); + final List> unknownSettings = new ArrayList<>(numberOfUnknownSettings); + for (int i = 0; i < numberOfUnknownSettings; i++) { + final Setting unknownSetting = Setting.simpleString("unknown.setting" + i, Property.NodeScope); + unknownSettings.add(unknownSetting); + } + + final Settings.Builder existingPersistentSettings = Settings.builder(); + final Settings.Builder existingTransientSettings = Settings.builder(); + + for (final Setting dynamicSetting : dynamicSettings) { + switch (randomIntBetween(0, 2)) { + case 0: + existingPersistentSettings.put(dynamicSetting.getKey(), "existing_value"); + break; + case 1: + existingTransientSettings.put(dynamicSetting.getKey(), "existing_value"); + break; + case 2: + break; + } + } + + for (final Setting invalidSetting : invalidSettings) { + if (randomBoolean()) { + existingPersistentSettings.put(invalidSetting.getKey(), "value"); + } else { + existingTransientSettings.put(invalidSetting.getKey(), "value"); + } + } + + for (final Setting unknownSetting : unknownSettings) { + if (randomBoolean()) { + existingPersistentSettings.put(unknownSetting.getKey(), "value"); + } else { + existingTransientSettings.put(unknownSetting.getKey(), "value"); + } + } + + // register all the known settings (note that we do not register the unknown settings) + final Set> knownSettings = + Stream.concat( + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), + Stream.concat(dynamicSettings.stream(), invalidSettings.stream())) + .collect(Collectors.toSet()); + final ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, knownSettings); + for (final Setting dynamicSetting : 
dynamicSettings) { + clusterSettings.addSettingsUpdateConsumer(dynamicSetting, s -> {}); + } + final SettingsUpdater settingsUpdater = new SettingsUpdater(clusterSettings); + final MetaData.Builder metaDataBuilder = + MetaData.builder() + .persistentSettings(existingPersistentSettings.build()) + .transientSettings(existingTransientSettings.build()); + final ClusterState clusterState = ClusterState.builder(new ClusterName("cluster")).metaData(metaDataBuilder).build(); + + // prepare the dynamic settings update + final Settings.Builder persistentToApply = Settings.builder(); + final Settings.Builder transientToApply = Settings.builder(); + for (final Setting dynamicSetting : dynamicSettings) { + switch (randomIntBetween(0, 2)) { + case 0: + persistentToApply.put(dynamicSetting.getKey(), "new_value"); + break; + case 1: + transientToApply.put(dynamicSetting.getKey(), "new_value"); + break; + case 2: + break; + } + } + + if (transientToApply.keys().isEmpty() && persistentToApply.keys().isEmpty()) { + // force a settings update otherwise our assertions below will fail + if (randomBoolean()) { + persistentToApply.put(dynamicSettings.get(0).getKey(), "new_value"); + } else { + transientToApply.put(dynamicSettings.get(0).getKey(), "new_value"); + } + } + + final ClusterState clusterStateAfterUpdate = + settingsUpdater.updateSettings(clusterState, transientToApply.build(), persistentToApply.build(), logger); + + // the invalid settings should be archived and not present in non-archived form + for (final Setting invalidSetting : invalidSettings) { + if (existingPersistentSettings.keys().contains(invalidSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } + assertThat( + 
clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + } + + // the unknown settings should be archived and not present in non-archived form + for (final Setting unknownSetting : unknownSettings) { + if (existingPersistentSettings.keys().contains(unknownSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + } + + // the dynamic settings should be applied + for (final Setting dynamicSetting : dynamicSettings) { + if (persistentToApply.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().get(dynamicSetting.getKey()), equalTo("new_value")); + } else if (transientToApply.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().transientSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat(clusterStateAfterUpdate.metaData().transientSettings().get(dynamicSetting.getKey()), equalTo("new_value")); + } else { + if (existingPersistentSettings.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat( + 
clusterStateAfterUpdate.metaData().persistentSettings().get(dynamicSetting.getKey()), + equalTo("existing_value")); + } else if (existingTransientSettings.keys().contains(dynamicSetting.getKey())) { + assertThat(clusterStateAfterUpdate.metaData().transientSettings().keySet(), hasItem(dynamicSetting.getKey())); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().get(dynamicSetting.getKey()), + equalTo("existing_value")); + } else { + assertThat(clusterStateAfterUpdate.metaData().persistentSettings().keySet(), not(hasItem(dynamicSetting.getKey()))); + assertThat(clusterStateAfterUpdate.metaData().transientSettings().keySet(), not(hasItem(dynamicSetting.getKey()))); + } + } + } + } + + public void testRemovingArchivedSettingsDoesNotRemoveNonArchivedInvalidOrUnknownSettings() { + // these are settings that are archived in the cluster state as either persistent or transient settings + final int numberOfArchivedSettings = randomIntBetween(1, 8); + final List> archivedSettings = new ArrayList<>(numberOfArchivedSettings); + for (int i = 0; i < numberOfArchivedSettings; i++) { + final Setting archivedSetting = Setting.simpleString("setting", Property.NodeScope); + archivedSettings.add(archivedSetting); + } + + // these are invalid settings that exist as either persistent or transient settings + final int numberOfInvalidSettings = randomIntBetween(0, 7); + final List> invalidSettings = new ArrayList<>(numberOfInvalidSettings); + for (int i = 0; i < numberOfInvalidSettings; i++) { + final Setting invalidSetting = Setting.simpleString( + "invalid.setting" + i, + (value, settings) -> { + throw new IllegalArgumentException("invalid"); + }, + Property.NodeScope); + invalidSettings.add(invalidSetting); + } + + // these are unknown settings that exist as either persistent or transient settings + final int numberOfUnknownSettings = randomIntBetween(0, 7); + final List> unknownSettings = new ArrayList<>(numberOfUnknownSettings); + for (int i = 0; i < 
+ // register all the known settings (note that we do not register the unknown settings)
= Settings.builder().put("archived.*", (String)null); + + final ClusterState clusterStateAfterUpdate = + settingsUpdater.updateSettings(clusterState, transientToApply.build(), persistentToApply.build(), logger); + + // existing archived settings are removed + for (final Setting archivedSetting : archivedSettings) { + if (existingPersistentSettings.keys().contains(ARCHIVED_SETTINGS_PREFIX + archivedSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(ARCHIVED_SETTINGS_PREFIX + archivedSetting.getKey()))); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(ARCHIVED_SETTINGS_PREFIX + archivedSetting.getKey()))); + } + } + + // the invalid settings should be archived and not present in non-archived form + for (final Setting invalidSetting : invalidSettings) { + if (existingPersistentSettings.keys().contains(invalidSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + invalidSetting.getKey())); + } + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(invalidSetting.getKey()))); + } + + // the unknown settings should be archived and not present in non-archived form + for (final Setting unknownSetting : unknownSettings) { + if (existingPersistentSettings.keys().contains(unknownSetting.getKey())) { + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } else { + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + 
hasItem(ARCHIVED_SETTINGS_PREFIX + unknownSetting.getKey())); + } + assertThat( + clusterStateAfterUpdate.metaData().persistentSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + assertThat( + clusterStateAfterUpdate.metaData().transientSettings().keySet(), + not(hasItem(unknownSetting.getKey()))); + } + } + } From aa2dc7cfb3b8c03e2b555461adccc29f05218d34 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 18:18:01 -0400 Subject: [PATCH 52/89] Add docs for error file configuration (#29032) This commit adds docs for configuring the error file setting for where the JVM writes fatal error logs. --- .../setup/important-settings/error-file.asciidoc | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 docs/reference/setup/important-settings/error-file.asciidoc diff --git a/docs/reference/setup/important-settings/error-file.asciidoc b/docs/reference/setup/important-settings/error-file.asciidoc new file mode 100644 index 0000000000000..37f1d2a0b14ed --- /dev/null +++ b/docs/reference/setup/important-settings/error-file.asciidoc @@ -0,0 +1,16 @@ +[[error-file-path]] +=== JVM fatal error logs + +The <> and <> package distributions default to configuring +the JVM to write fatal error logs to `/var/lib/elasticsearch`; these are logs +produced by the JVM when it encounters a fatal error (e.g., a segmentation +fault). If this path is not suitable for receiving logs, you should modify the +entry `-XX:ErrorFile=/var/lib/elasticsearch/hs_err_pid%p.log` in +<> to an alternate path. + +Note that the archive distributions do not configure the error file path by +default. Instead, the JVM will default to writing to the working directory for +the Elasticsearch process. If you wish to configure an error file path, you +should modify the entry `#-XX:ErrorFile=/error/file/path` in +<> to remove the comment marker `#` and to specify an +actual path. 
From 236da6a3281fcbc0f534c1cfae6eb0f27aceb83c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 18:27:14 -0400 Subject: [PATCH 53/89] Add search slowlog level to docs (#29040) This commit adds an indication how to set the search slowlog level to the docs. --- docs/reference/index-modules/slowlog.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/index-modules/slowlog.asciidoc b/docs/reference/index-modules/slowlog.asciidoc index 1ee49b1c04dbc..de8706a8c89f9 100644 --- a/docs/reference/index-modules/slowlog.asciidoc +++ b/docs/reference/index-modules/slowlog.asciidoc @@ -22,6 +22,8 @@ index.search.slowlog.threshold.fetch.warn: 1s index.search.slowlog.threshold.fetch.info: 800ms index.search.slowlog.threshold.fetch.debug: 500ms index.search.slowlog.threshold.fetch.trace: 200ms + +index.search.slowlog.level: info -------------------------------------------------- All of the above settings are _dynamic_ and are set per-index. From f920dd88a9e1c7f2ba26bd0701b0fbbc2856857e Mon Sep 17 00:00:00 2001 From: Chun On Lee Date: Tue, 13 Mar 2018 07:44:21 -0700 Subject: [PATCH 54/89] Update "_doc" to "account" type for bulk example (#28786) * Change 'account' to '_doc' as types are deprecated --- docs/reference/getting-started.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index b3156dbc1f414..af7fc8fa6d69b 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -669,7 +669,7 @@ You can download the sample dataset (accounts.json) from https://github.com/elas [source,sh] -------------------------------------------------- -curl -H "Content-Type: application/json" -XPOST "localhost:9200/bank/account/_bulk?pretty&refresh" --data-binary "@accounts.json" +curl -H "Content-Type: application/json" -XPOST "localhost:9200/bank/_doc/_bulk?pretty&refresh" --data-binary "@accounts.json" curl 
"localhost:9200/_cat/indices?v" -------------------------------------------------- // NOTCONSOLE From 795af9029bda4262bb9210acc52c4a5f67f1ffbc Mon Sep 17 00:00:00 2001 From: olcbean <26058559+olcbean@users.noreply.github.com> Date: Tue, 13 Mar 2018 20:16:27 +0100 Subject: [PATCH 55/89] REST: deprecate `field_data` in Clear Cache API (#28943) We call it `fielddata` everywhere else in the code and API so we may as well be consistent. --- .../test/indices.clear_cache/10_basic.yml | 23 +++++++++++++++++++ .../indices/RestClearIndicesCacheAction.java | 4 ++-- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml index d8db152e979b0..b5e98949f03b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yml @@ -25,3 +25,26 @@ - 'Deprecated field [request_cache] used, expected [request] instead' indices.clear_cache: request_cache: false + +--- +"clear_cache with field_data set to true": + - skip: + version: " - 6.2.99" + reason: field_data was deprecated in 6.3.0 + features: "warnings" + + - do: + warnings: + - 'Deprecated field [field_data] used, expected [fielddata] instead' + indices.clear_cache: + field_data: true + +--- +"clear_cache with fielddata set to true": + - skip: + version: " - 6.2.99" + reason: fielddata was deprecated before 6.3.0 + + - do: + indices.clear_cache: + fielddata: true diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java index b96ada4cdd974..d0ec01dc552e4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java @@ -88,7 +88,7 @@ public static ClearIndicesCacheRequest fromRequest(final RestRequest request, Cl clearIndicesCacheRequest.queryCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.queryCache())); } else if (Fields.REQUEST.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) { clearIndicesCacheRequest.requestCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.requestCache())); - } else if (Fields.FIELD_DATA.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) { + } else if (Fields.FIELDDATA.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) { clearIndicesCacheRequest.fieldDataCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.fieldDataCache())); } else if (Fields.FIELDS.match(entry.getKey(), LoggingDeprecationHandler.INSTANCE)) { clearIndicesCacheRequest.fields(request.paramAsStringArray(entry.getKey(), clearIndicesCacheRequest.fields())); @@ -101,7 +101,7 @@ public static ClearIndicesCacheRequest fromRequest(final RestRequest request, Cl public static class Fields { public static final ParseField QUERY = new ParseField("query", "filter", "filter_cache"); public static final ParseField REQUEST = new ParseField("request", "request_cache"); - public static final ParseField FIELD_DATA = new ParseField("field_data", "fielddata"); + public static final ParseField FIELDDATA = new ParseField("fielddata", "field_data"); public static final ParseField FIELDS = new ParseField("fields"); } From 890e0fb755a9ae0ab12cc178d59c61693eb63030 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 20:40:47 -0400 Subject: [PATCH 56/89] Add total hits to the search slow log (#29034) This commit adds the total hits to the search slow log. 
--- .../main/java/org/elasticsearch/index/SearchSlowLog.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java index a48e3d7bd72c5..f3c5d07f1f2f4 100644 --- a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -149,8 +149,11 @@ static final class SlowLogSearchContextPrinter { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append(context.indexShard().shardId()).append(" "); - sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); + sb.append(context.indexShard().shardId()) + .append(" ") + .append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], ") + .append("took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ") + .append("total_hits[").append(context.queryResult().getTotalHits()).append("], "); if (context.getQueryShardContext().getTypes() == null) { sb.append("types[], "); } else { From 394d7bdfaea93e94e5a1ca326a8fb4dd39e8e5fb Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 13 Mar 2018 23:42:16 -0400 Subject: [PATCH 57/89] Do not swallow fail to convert exceptions (#29043) When converting the source for an indexing request to JSON, the conversion can throw an I/O exception which we swallow and proceed with logging to the slow log. The cause of the I/O exception is lost. This commit changes this behavior and chooses to drop the entry from the slow logs and instead lets an exception percolate up to the indexing operation listener loop. Here, the exception will be caught and logged at the warn level. 
--- .../org/elasticsearch/index/IndexingSlowLog.java | 8 ++++++++ .../elasticsearch/index/IndexingSlowLogTests.java | 12 ++++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 94c3892ef361e..b75cda5b6ca70 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -33,6 +33,8 @@ import org.elasticsearch.index.shard.ShardId; import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Locale; import java.util.concurrent.TimeUnit; public final class IndexingSlowLog implements IndexingOperationListener { @@ -194,6 +196,12 @@ public String toString() { sb.append(", source[").append(Strings.cleanTruncate(source, maxSourceCharsToLog)).append("]"); } catch (IOException e) { sb.append(", source[_failed_to_convert_[").append(e.getMessage()).append("]]"); + /* + * We choose to fail to write to the slow log and instead let this percolate up to the post index listener loop where this + * will be logged at the warn level. 
+ */ + final String message = String.format(Locale.ROOT, "failed to convert source for slow log entry [%s]", sb.toString()); + throw new UncheckedIOException(message, e); } return sb.toString(); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index 45b0d0aa2475c..ff5166e8f1a14 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index; +import com.fasterxml.jackson.core.JsonParseException; import org.apache.lucene.document.NumericDocValuesField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -34,6 +35,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.io.UncheckedIOException; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasToString; @@ -70,9 +72,15 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { "test", null, null, source, XContentType.JSON, null); p = new SlowLogParsedDocumentPrinter(index, pd, 10, true, 3); - assertThat(p.toString(), containsString("_failed_to_convert_[Unrecognized token 'invalid':" + final UncheckedIOException e = expectThrows(UncheckedIOException.class, p::toString); + assertThat(e, hasToString(containsString("_failed_to_convert_[Unrecognized token 'invalid':" + " was expecting ('true', 'false' or 'null')\n" - + " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper")); + + " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper"))); + assertNotNull(e.getCause()); + assertThat(e.getCause(), instanceOf(JsonParseException.class)); + assertThat(e.getCause(), hasToString(containsString("Unrecognized token 'invalid':" + + " was expecting ('true', 'false' or 
'null')\n" + + " at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper"))); } public void testReformatSetting() { From 74eeba612eeb2c8890629129dd8a4d7648aff936 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 14 Mar 2018 11:51:22 +0100 Subject: [PATCH 58/89] added docs for `wrapper` query. Closes #11591 --- .../QueryDSLDocumentationTests.java | 8 +++++++ .../query-dsl/special-queries.asciidoc | 5 ++++ .../java-api/query-dsl/wrapper-query.asciidoc | 11 +++++++++ .../high-level/query-builders.asciidoc | 1 + .../query-dsl/special-queries.asciidoc | 5 ++++ .../query-dsl/wrapper-query.asciidoc | 24 +++++++++++++++++++ 6 files changed, 54 insertions(+) create mode 100644 docs/java-api/query-dsl/wrapper-query.asciidoc create mode 100644 docs/reference/query-dsl/wrapper-query.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java index d4cfab9cc6ab8..7a93c0904791b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java @@ -76,6 +76,7 @@ import static org.elasticsearch.index.query.QueryBuilders.termsQuery; import static org.elasticsearch.index.query.QueryBuilders.typeQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; +import static org.elasticsearch.index.query.QueryBuilders.wrapperQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction; @@ -449,4 +450,11 @@ public void testWildcard() { "k?mch*"); // <2> // end::wildcard } + + public void testWrapper() { + // tag::wrapper + String query = 
"{\"term\": {\"user\": \"kimchy\"}}"; // <1> + wrapperQuery(query); + // end::wrapper + } } diff --git a/docs/java-api/query-dsl/special-queries.asciidoc b/docs/java-api/query-dsl/special-queries.asciidoc index 4e4d59a6d4aa5..bca3bde3b3f62 100644 --- a/docs/java-api/query-dsl/special-queries.asciidoc +++ b/docs/java-api/query-dsl/special-queries.asciidoc @@ -18,9 +18,14 @@ This query allows a script to act as a filter. Also see the This query finds percolator queries based on documents. +<>:: + +A query that accepts other queries as json or yaml string. + include::mlt-query.asciidoc[] include::script-query.asciidoc[] include::percolate-query.asciidoc[] +include::wrapper-query.asciidoc[] diff --git a/docs/java-api/query-dsl/wrapper-query.asciidoc b/docs/java-api/query-dsl/wrapper-query.asciidoc new file mode 100644 index 0000000000000..3bdf3cc69d30a --- /dev/null +++ b/docs/java-api/query-dsl/wrapper-query.asciidoc @@ -0,0 +1,11 @@ +[[java-query-dsl-wrapper-query]] +==== Wrapper Query + +See {ref}/query-dsl-wrapper-query.html[Wrapper Query] + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{query-dsl-test}[wrapper] +-------------------------------------------------- + +<1> query defined as query builder diff --git a/docs/java-rest/high-level/query-builders.asciidoc b/docs/java-rest/high-level/query-builders.asciidoc index 88204baa8745d..32a3b06505b1d 100644 --- a/docs/java-rest/high-level/query-builders.asciidoc +++ b/docs/java-rest/high-level/query-builders.asciidoc @@ -82,6 +82,7 @@ This page lists all the available search queries with their corresponding `Query | {ref}/query-dsl-mlt-query.html[More Like This] | {query-ref}/MoreLikeThisQueryBuilder.html[MoreLikeThisQueryBuilder] | {query-ref}/QueryBuilders.html#moreLikeThisQuery-org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item:A-[QueryBuilders.moreLikeThisQuery()] | {ref}/query-dsl-script-query.html[Script] | 
{query-ref}/ScriptQueryBuilder.html[ScriptQueryBuilder] | {query-ref}/QueryBuilders.html#scriptQuery-org.elasticsearch.script.Script-[QueryBuilders.scriptQuery()] | {ref}/query-dsl-percolate-query.html[Percolate] | {percolate-ref}/PercolateQueryBuilder.html[PercolateQueryBuilder] | +| {ref}/query-dsl-wrapper-query.html[Wrapper] | {query-ref}/WrapperQueryBuilder.html[WrapperQueryBuilder] | {query-ref}/QueryBuilders.html#wrapperQuery-java.lang.String-[QueryBuilders.wrapperQuery()] |====== ==== Span queries diff --git a/docs/reference/query-dsl/special-queries.asciidoc b/docs/reference/query-dsl/special-queries.asciidoc index 3e3c140d6f582..a062fa7ddb1fb 100644 --- a/docs/reference/query-dsl/special-queries.asciidoc +++ b/docs/reference/query-dsl/special-queries.asciidoc @@ -19,9 +19,14 @@ This query allows a script to act as a filter. Also see the This query finds queries that are stored as documents that match with the specified document. +<>:: + +A query that accepts other queries as json or yaml string. + include::mlt-query.asciidoc[] include::script-query.asciidoc[] include::percolate-query.asciidoc[] +include::wrapper-query.asciidoc[] diff --git a/docs/reference/query-dsl/wrapper-query.asciidoc b/docs/reference/query-dsl/wrapper-query.asciidoc new file mode 100644 index 0000000000000..4ffef5bfc6bcc --- /dev/null +++ b/docs/reference/query-dsl/wrapper-query.asciidoc @@ -0,0 +1,24 @@ +[[query-dsl-wrapper-query]] +=== Wrapper Query + +A query that accepts any other query as base64 encoded string. + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query" : { + "wrapper": { + "query" : "eyJ0ZXJtIiA6IHsgInVzZXIiIDogIktpbWNoeSIgfX0=" <1> + } + } +} +-------------------------------------------------- +// CONSOLE + +<1> Base64 encoded string: `{"term" : { "user" : "Kimchy" }}` + +This query is more useful in the context of the Java high-level REST client or +transport client to also accept queries as json formatted string. 
+In these cases queries can be specified as a json or yaml formatted string or +as a query builder (which is a available in the Java high-level REST client). \ No newline at end of file From d61fab95de1b2c969802055183eba5488376921c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 14 Mar 2018 07:45:32 -0400 Subject: [PATCH 59/89] Fix description of die with dignity plugin This commit adjusts the description of the die with dignity plugin which was leftover from a previous iteration on this work. --- qa/die-with-dignity/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/die-with-dignity/build.gradle b/qa/die-with-dignity/build.gradle index a3a9bd3da5800..49a4d3c50873e 100644 --- a/qa/die-with-dignity/build.gradle +++ b/qa/die-with-dignity/build.gradle @@ -20,7 +20,7 @@ apply plugin: 'elasticsearch.esplugin' esplugin { - description 'Out of memory plugin' + description 'Die with dignity plugin' classname 'org.elasticsearch.DieWithDignityPlugin' } From 2138a88d420de60288cd86c8667f9323ec6bf909 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 14 Mar 2018 13:44:12 +0100 Subject: [PATCH 60/89] Fix eclipse compile issues (#29056) Eclipse Oxygen doesn't seem to be able to infer the correct type arguments for Arrays::asList in the given test context. Adding cast to make this more explicit. 
--- .../org/elasticsearch/core/internal/io/IOUtilsTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java index c133a9ddc1f3c..ee5af323b5219 100644 --- a/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java +++ b/libs/elasticsearch-core/src/test/java/org/elasticsearch/core/internal/io/IOUtilsTests.java @@ -53,7 +53,7 @@ public void testCloseArray() throws IOException { } public void testCloseIterable() throws IOException { - runTestClose(Arrays::asList, IOUtils::close); + runTestClose((Function>) Arrays::asList, IOUtils::close); } private void runTestClose(final Function function, final CheckedConsumer close) throws IOException { @@ -74,7 +74,7 @@ public void testCloseArrayWithIOExceptions() throws IOException { } public void testCloseIterableWithIOExceptions() throws IOException { - runTestCloseWithIOExceptions(Arrays::asList, IOUtils::close); + runTestCloseWithIOExceptions((Function>) Arrays::asList, IOUtils::close); } private void runTestCloseWithIOExceptions( @@ -113,7 +113,7 @@ public void testDeleteFilesIgnoringExceptionsArray() throws IOException { } public void testDeleteFilesIgnoringExceptionsIterable() throws IOException { - runDeleteFilesIgnoringExceptionsTest(Arrays::asList, IOUtils::deleteFilesIgnoringExceptions); + runDeleteFilesIgnoringExceptionsTest((Function>) Arrays::asList, IOUtils::deleteFilesIgnoringExceptions); } private void runDeleteFilesIgnoringExceptionsTest( From ca6adacd38e85d3104c4392e25183dda0651ad07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 14 Mar 2018 14:43:28 +0100 Subject: [PATCH 61/89] [Docs] Add note to migration doc (#29017) The breaking changes docs should list the fact that `types` and `_type` synonyms for the `type` parameter in the `ids` query 
have been removed in 6.0. Closes #26974 --- docs/reference/migration/migrate_6_0/search.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/migration/migrate_6_0/search.asciidoc b/docs/reference/migration/migrate_6_0/search.asciidoc index 032de3dd516e1..9bc84e05222b9 100644 --- a/docs/reference/migration/migrate_6_0/search.asciidoc +++ b/docs/reference/migration/migrate_6_0/search.asciidoc @@ -94,6 +94,8 @@ the `match` query but is supported for `match_phrase` and `match_phrase_prefix`. * The deprecated `le` (a synonym for `lte`) and `ge` (a synonym for `gte`) parameter of the `range` query have been removed. +* The deprecated `types` and `_type` synonyms for the `type` parameter of the `ids` query have been removed + * The deprecated multi term rewrite parameters `constant_score_auto`, `constant_score_filter` (synonyms for `constant_score`) have been removed. From 4a3b230a4a2bb62e3ce294c3f8990580f763d2e6 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 14 Mar 2018 11:39:35 -0400 Subject: [PATCH 62/89] Mark field_data as deprecated in /_cache/clear It was deprecated in #28943. --- .../main/resources/rest-api-spec/api/indices.clear_cache.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json index 1f24199fad468..1523c722da31d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json @@ -14,7 +14,7 @@ "params": { "field_data": { "type" : "boolean", - "description" : "Clear field data" + "description" : "Clear field data. This is deprecated. Prefer `fielddata`." 
}, "fielddata": { "type" : "boolean", @@ -22,7 +22,7 @@ }, "fields": { "type" : "list", - "description" : "A comma-separated list of fields to clear when using the `field_data` parameter (default: all)" + "description" : "A comma-separated list of fields to clear when using the `fielddata` parameter (default: all)" }, "query": { "type" : "boolean", From ba1883f82e7ea1fb9eb03b6762067477f77a932f Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 14 Mar 2018 15:43:53 +0000 Subject: [PATCH 63/89] Enforce that java.io.tmpdir exists on startup (#28217) If the default java.io.tmpdir is used then the startup script creates it, but if a custom java.io.tmpdir is used then the user must ensure it exists before running Elasticsearch. If they forget then it can cause errors that are hard to understand, so this change adds an explicit check early in the bootstrap and reports a clear error if java.io.tmpdir is not an accessible directory. --- .../bootstrap/Elasticsearch.java | 7 ++++++ .../org/elasticsearch/env/Environment.java | 20 ++++++++++++++- .../elasticsearch/env/EnvironmentTests.java | 25 +++++++++++++++++-- 3 files changed, 49 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 1538f0cdf0003..a0646288b1ad0 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -108,6 +108,13 @@ protected void execute(Terminal terminal, OptionSet options, Environment env) th final Path pidFile = pidfileOption.value(options); final boolean quiet = options.has(quietOption); + // a misconfigured java.io.tmpdir can cause hard-to-diagnose problems later, so reject it immediately + try { + env.validateTmpFile(); + } catch (IOException e) { + throw new UserException(ExitCodes.CONFIG, e.getMessage()); + } + try { init(daemonize, pidFile, quiet, env); } catch 
(NodeValidationException e) { diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java index 2433ccf6e8ede..1f4940007afda 100644 --- a/server/src/main/java/org/elasticsearch/env/Environment.java +++ b/server/src/main/java/org/elasticsearch/env/Environment.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import java.io.FileNotFoundException; import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; @@ -87,9 +88,14 @@ public class Environment { private final Path pidFile; /** Path to the temporary file directory used by the JDK */ - private final Path tmpFile = PathUtils.get(System.getProperty("java.io.tmpdir")); + private final Path tmpFile; public Environment(final Settings settings, final Path configPath) { + this(settings, configPath, PathUtils.get(System.getProperty("java.io.tmpdir"))); + } + + // Should only be called directly by this class's unit tests + Environment(final Settings settings, final Path configPath, final Path tmpPath) { final Path homeFile; if (PATH_HOME_SETTING.exists(settings)) { homeFile = PathUtils.get(PATH_HOME_SETTING.get(settings)).normalize(); @@ -103,6 +109,8 @@ public Environment(final Settings settings, final Path configPath) { configFile = homeFile.resolve("config"); } + tmpFile = Objects.requireNonNull(tmpPath); + pluginsFile = homeFile.resolve("plugins"); List dataPaths = PATH_DATA_SETTING.get(settings); @@ -302,6 +310,16 @@ public Path tmpFile() { return tmpFile; } + /** Ensure the configured temp directory is a valid directory */ + public void validateTmpFile() throws IOException { + if (Files.exists(tmpFile) == false) { + throw new FileNotFoundException("Temporary file directory [" + tmpFile + "] does not exist or is not accessible"); + } + if (Files.isDirectory(tmpFile) == false) { + throw new IOException("Configured 
temporary file directory [" + tmpFile + "] is not a directory"); + } + } + public static FileStore getFileStore(final Path path) throws IOException { return new ESFileStore(Files.getFileStore(path)); } diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 5ca3f4dc6a591..5ada31b612941 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.io.FileNotFoundException; import java.io.IOException; import java.net.URL; import java.nio.file.Path; @@ -28,6 +29,7 @@ import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -37,11 +39,11 @@ * Simple unit-tests for Environment.java */ public class EnvironmentTests extends ESTestCase { - public Environment newEnvironment() throws IOException { + public Environment newEnvironment() { return newEnvironment(Settings.EMPTY); } - public Environment newEnvironment(Settings settings) throws IOException { + public Environment newEnvironment(Settings settings) { Settings build = Settings.builder() .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) @@ -146,4 +148,23 @@ public void testNodeDoesNotRequireLocalStorageButHasPathData() { assertThat(e, hasToString(containsString("node does not require local storage yet path.data is set to [" + pathData + "]"))); } + public void testNonExistentTempPathValidation() { + Settings build = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir()) + .build(); + Environment environment = new Environment(build, null, createTempDir().resolve("this_does_not_exist")); + FileNotFoundException e = expectThrows(FileNotFoundException.class, environment::validateTmpFile); + assertThat(e.getMessage(), startsWith("Temporary file directory [")); + assertThat(e.getMessage(), endsWith("this_does_not_exist] does not exist or is not accessible")); + } + + public void testTempPathValidationWhenRegularFile() throws IOException { + Settings build = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); + Environment environment = new Environment(build, null, createTempFile("something", ".test")); + IOException e = expectThrows(IOException.class, environment::validateTmpFile); + assertThat(e.getMessage(), startsWith("Configured temporary file directory [")); + assertThat(e.getMessage(), endsWith(".test] is not a directory")); + } } From ca90117fb98c06605f2faf84fb2c34d9e8524514 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 14 Mar 2018 15:47:18 +0000 Subject: [PATCH 64/89] Protect against NPE in RestNodesAction (#29059) * Protect against NPE in RestNodesAction --- .../rest/action/cat/RestNodesAction.java | 28 +++++--- .../rest/action/cat/RestNodesActionTests.java | 69 +++++++++++++++++++ 2 files changed, 88 insertions(+), 9 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 07f39b54f613a..39130b1713a92 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -236,7 +236,7 @@ protected Table getTableWithHeader(final RestRequest request) { return table; } - private Table buildTable(boolean fullId, RestRequest req, 
ClusterStateResponse state, NodesInfoResponse nodesInfo, + Table buildTable(boolean fullId, RestRequest req, ClusterStateResponse state, NodesInfoResponse nodesInfo, NodesStatsResponse nodesStats) { DiscoveryNodes nodes = state.getState().nodes(); @@ -271,14 +271,24 @@ private Table buildTable(boolean fullId, RestRequest req, ClusterStateResponse s table.addCell(node.getVersion().toString()); table.addCell(info == null ? null : info.getBuild().shortHash()); table.addCell(jvmInfo == null ? null : jvmInfo.version()); - - long diskTotal = fsInfo.getTotal().getTotal().getBytes(); - long diskUsed = diskTotal - fsInfo.getTotal().getAvailable().getBytes(); - double diskUsedRatio = diskTotal == 0 ? 1.0 : (double) diskUsed / diskTotal; - table.addCell(fsInfo == null ? null : fsInfo.getTotal().getTotal()); - table.addCell(fsInfo == null ? null : new ByteSizeValue(diskUsed)); - table.addCell(fsInfo == null ? null : fsInfo.getTotal().getAvailable()); - table.addCell(fsInfo == null ? null : String.format(Locale.ROOT, "%.2f", 100.0 * diskUsedRatio)); + + + ByteSizeValue diskTotal = null; + ByteSizeValue diskUsed = null; + ByteSizeValue diskAvailable = null; + String diskUsedPercent = null; + if (fsInfo != null) { + diskTotal = fsInfo.getTotal().getTotal(); + diskAvailable = fsInfo.getTotal().getAvailable(); + diskUsed = new ByteSizeValue(diskTotal.getBytes() - diskAvailable.getBytes()); + + double diskUsedRatio = diskTotal.getBytes() == 0 ? 1.0 : (double) diskUsed.getBytes() / diskTotal.getBytes(); + diskUsedPercent = String.format(Locale.ROOT, "%.2f", 100.0 * diskUsedRatio); + } + table.addCell(diskTotal); + table.addCell(diskUsed); + table.addCell(diskAvailable); + table.addCell(diskUsedPercent); table.addCell(jvmStats == null ? null : jvmStats.getMem().getHeapUsed()); table.addCell(jvmStats == null ? 
null : jvmStats.getMem().getHeapUsedPercent()); diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java new file mode 100644 index 0000000000000..32993a6b7c720 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.rest.action.cat; + +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.usage.UsageService; +import org.junit.Before; + +import java.util.Collections; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class RestNodesActionTests extends ESTestCase { + + private RestNodesAction action; + + @Before + public void setUpAction() { + UsageService usageService = new UsageService(Settings.EMPTY); + action = new RestNodesAction(Settings.EMPTY, + new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null, usageService)); + } + + public void testBuildTableDoesNotThrowGivenNullNodeInfoAndStats() { + ClusterName clusterName = new ClusterName("cluster-1"); + DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); + builder.add(new DiscoveryNode("node-1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT)); + DiscoveryNodes discoveryNodes = builder.build(); + ClusterState clusterState = mock(ClusterState.class); + when(clusterState.nodes()).thenReturn(discoveryNodes); + + ClusterStateResponse clusterStateResponse = new ClusterStateResponse(clusterName, clusterState, randomNonNegativeLong()); + NodesInfoResponse nodesInfoResponse = new 
NodesInfoResponse(clusterName, Collections.emptyList(), Collections.emptyList()); + NodesStatsResponse nodesStatsResponse = new NodesStatsResponse(clusterName, Collections.emptyList(), Collections.emptyList()); + + action.buildTable(false, new FakeRestRequest(), clusterStateResponse, nodesInfoResponse, nodesStatsResponse); + } +} From 8abeede8d291291d1ff0076029aea740357d90c2 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 14 Mar 2018 17:16:48 +0100 Subject: [PATCH 65/89] Removed outdated docs about shading Elasticsearch --- docs/java-api/index.asciidoc | 56 ------------------------------------ 1 file changed, 56 deletions(-) diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 6d3b616e89bbd..c7d582ca98441 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -150,62 +150,6 @@ and add it as a dependency. As an example, we will use the `slf4j-simple` logger -------------------------------------------------- - -== Dealing with JAR dependency conflicts - -If you want to use Elasticsearch in your Java application, you may have to deal with version conflicts with third party -dependencies like Guava and Joda. For instance, perhaps Elasticsearch uses Joda 2.8, while your code uses Joda 2.1. - -You have two choices: - -* The simplest solution is to upgrade. Newer module versions are likely to have fixed old bugs. -The further behind you fall, the harder it will be to upgrade later. Of course, it is possible that you are using a -third party dependency that in turn depends on an outdated version of a package, which prevents you from upgrading. - -* The second option is to relocate the troublesome dependencies and to shade them either with your own application -or with Elasticsearch and any plugins needed by the Elasticsearch client. - -The https://www.elastic.co/blog/to-shade-or-not-to-shade["To shade or not to shade" blog post] describes -all the steps for doing so. 
- -== Embedding jar with dependencies - -If you want to create a single jar containing your application and all dependencies, you should not -use `maven-assembly-plugin` for that because it can not deal with `META-INF/services` structure which is -required by Lucene jars. - -Instead, you can use `maven-shade-plugin` and configure it as follow: - -[source,xml] --------------------------------------------------- - - org.apache.maven.plugins - maven-shade-plugin - 2.4.1 - - - package - shade - - - - - - - - --------------------------------------------------- - -Note that if you have a `main` class you want to automatically call when running `java -jar yourjar.jar`, just add -it to the `transformers`: - -[source,xml] --------------------------------------------------- - - org.elasticsearch.demo.Generate - --------------------------------------------------- - :client-tests: {docdir}/../../server/src/test/java/org/elasticsearch/client/documentation include::client.asciidoc[] From 56ee723859be3bad7cfb616080a7c8013f64682c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 14 Mar 2018 13:02:55 -0400 Subject: [PATCH 66/89] Fix typo in terminate after API docs This commit fixes a minor typo in the terminate after Java API docs. Relates #29065 --- docs/java-api/search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/java-api/search.asciidoc b/docs/java-api/search.asciidoc index dfc2767520237..78fb4539641d5 100644 --- a/docs/java-api/search.asciidoc +++ b/docs/java-api/search.asciidoc @@ -128,7 +128,7 @@ documentation for details. The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. 
If set, you will be able to check if the operation terminated early by asking for `isTerminatedEarly()` in the -`SearchResponse` onject: +`SearchResponse` object: [source,java] -------------------------------------------------- From c45e2089a77935ee1ab87522818123c97ef7e483 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 14 Mar 2018 14:25:14 -0400 Subject: [PATCH 67/89] Cleanup exception handling in IOUtils (#29069) When we copied IOUtils into the Elasticsearch codebase from Lucene, we brought with it its handling of throwables which are out of whack with how we handle throwables in our codebase. This commit modifies our copy of IOUtils to be consistent with how we handle throwables today: do not catch them. We take advantage of this cleanup to simplify IOUtils. --- .../core/internal/io/IOUtils.java | 77 ++++--------------- 1 file changed, 15 insertions(+), 62 deletions(-) diff --git a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java index 7507327199baf..eaa4df768cd71 100644 --- a/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java +++ b/libs/elasticsearch-core/src/main/java/org/elasticsearch/core/internal/io/IOUtils.java @@ -58,23 +58,29 @@ public static void close(final Closeable... objects) throws IOException { * @see #close(Closeable...) 
*/ public static void close(final Iterable objects) throws IOException { - Throwable th = null; + Exception ex = null; for (final Closeable object : objects) { try { if (object != null) { object.close(); } - } catch (final Throwable t) { - addSuppressed(th, t); - if (th == null) { - th = t; + } catch (final IOException | RuntimeException e) { + if (ex == null) { + ex = e; + } else { + ex.addSuppressed(e); } } } - if (th != null) { - throw rethrowAlways(th); + if (ex != null) { + if (ex instanceof IOException) { + throw (IOException) ex; + } else { + // since we only assigned an IOException or a RuntimeException to ex above, in this case ex must be a RuntimeException + throw (RuntimeException) ex; + } } } @@ -101,65 +107,12 @@ public static void closeWhileHandlingException(final Iterable - * This method never returns any value, even though it declares a return value of type {@link Error}. The return - * value declaration is very useful to let the compiler know that the code path following the invocation of this method is unreachable. - * So in most cases the invocation of this method will be guarded by an {@code if} and used together with a {@code throw} statement, as - * in: - *

- *

{@code
-     *   if (t != null) throw IOUtils.rethrowAlways(t)
-     * }
-     * 
- * - * @param th the throwable to rethrow; must not be null - * @return this method always results in an exception, it never returns any value; see method documentation for details and usage - * example - * @throws IOException if the argument was an instance of {@link IOException} - * @throws RuntimeException with the {@link RuntimeException#getCause()} set to the argument, if it was not an instance of - * {@link IOException} - */ - private static Error rethrowAlways(final Throwable th) throws IOException, RuntimeException { - if (th == null) { - throw new AssertionError("rethrow argument must not be null."); - } - - if (th instanceof IOException) { - throw (IOException) th; - } - - if (th instanceof RuntimeException) { - throw (RuntimeException) th; - } - - if (th instanceof Error) { - throw (Error) th; - } - - throw new RuntimeException(th); - } - /** * Deletes all given files, suppressing all thrown {@link IOException}s. Some of the files may be null, if so they are ignored. * @@ -180,7 +133,7 @@ public static void deleteFilesIgnoringExceptions(final Collection Date: Wed, 14 Mar 2018 14:27:01 -0400 Subject: [PATCH 68/89] Docs: HighLevelRestClient#ping (#29070) Add documentation for `HighLevelRestClient#ping`. 
Relates to #28389 --- ...java => MiscellaneousDocumentationIT.java} | 24 +++++++++---------- .../high-level/getting-started.asciidoc | 4 ++-- docs/java-rest/high-level/migration.asciidoc | 6 ++--- .../high-level/miscellaneous/main.asciidoc | 4 ++-- .../high-level/miscellaneous/ping.asciidoc | 13 ++++++++++ .../high-level/supported-apis.asciidoc | 2 ++ 6 files changed, 32 insertions(+), 21 deletions(-) rename client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/{MainDocumentationIT.java => MiscellaneousDocumentationIT.java} (81%) create mode 100644 docs/java-rest/high-level/miscellaneous/ping.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java similarity index 81% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index b6c5ae5028560..3e0608120f755 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MainDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -31,20 +31,10 @@ import java.io.IOException; /** - * This class is used to generate the Java Main API documentation. - * You need to wrap your code between two tags like: - * // tag::example[] - * // end::example[] - * - * Where example is your tag name. 
- * - * Then in the documentation, you can extract what is between tag and end tags with - * ["source","java",subs="attributes,callouts,macros"] - * -------------------------------------------------- - * include-tagged::{doc-tests}/MainDocumentationIT.java[example] - * -------------------------------------------------- + * Documentation for miscellaneous APIs in the high level java client. + * Code wrapped in {@code tag} and {@code end} tags is included in the docs. */ -public class MainDocumentationIT extends ESRestHighLevelClientTestCase { +public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase { public void testMain() throws IOException { RestHighLevelClient client = highLevelClient(); @@ -68,6 +58,14 @@ public void testMain() throws IOException { } } + public void testPing() throws IOException { + RestHighLevelClient client = highLevelClient(); + //tag::ping-execute + boolean response = client.ping(); + //end::ping-execute + assertTrue(response); + } + public void testInitializationFromClientBuilder() throws IOException { //tag::rest-high-level-client-init RestHighLevelClient client = new RestHighLevelClient( diff --git a/docs/java-rest/high-level/getting-started.asciidoc b/docs/java-rest/high-level/getting-started.asciidoc index ba5c7ba273eec..14a5058eb7272 100644 --- a/docs/java-rest/high-level/getting-started.asciidoc +++ b/docs/java-rest/high-level/getting-started.asciidoc @@ -126,7 +126,7 @@ to be built as follows: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-init] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-init] -------------------------------------------------- The high-level client will internally create the low-level client used to @@ -139,7 +139,7 @@ method, which will close the internal `RestClient` instance. 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-close] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-close] -------------------------------------------------- In the rest of this documentation about the Java High Level Client, the `RestHighLevelClient` instance diff --git a/docs/java-rest/high-level/migration.asciidoc b/docs/java-rest/high-level/migration.asciidoc index 44e895c9c712e..1349ccb35fe3b 100644 --- a/docs/java-rest/high-level/migration.asciidoc +++ b/docs/java-rest/high-level/migration.asciidoc @@ -64,7 +64,7 @@ argument: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-init] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-init] -------------------------------------------------- NOTE: The `RestClient` uses Elasticsearch's HTTP service which is @@ -91,7 +91,7 @@ must be replaced with: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[rest-high-level-client-close] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-client-close] -------------------------------------------------- === Changing the application's code @@ -351,5 +351,3 @@ body, but any other JSON parser could have been use instead. We love to hear from you! Please give us your feedback about your migration experience and how to improve the Java High Level Rest Client on https://discuss.elastic.co/[our forum]. 
- - diff --git a/docs/java-rest/high-level/miscellaneous/main.asciidoc b/docs/java-rest/high-level/miscellaneous/main.asciidoc index b37e85ee8bd7b..eddbf4a3d0b73 100644 --- a/docs/java-rest/high-level/miscellaneous/main.asciidoc +++ b/docs/java-rest/high-level/miscellaneous/main.asciidoc @@ -8,7 +8,7 @@ Cluster information can be retrieved using the `info()` method: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[main-execute] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[main-execute] -------------------------------------------------- [[java-rest-high-main-response]] @@ -18,7 +18,7 @@ The returned `MainResponse` provides various kinds of information about the clus ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/MainDocumentationIT.java[main-response] +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[main-response] -------------------------------------------------- <1> Retrieve the name of the cluster as a `ClusterName` <2> Retrieve the unique identifier of the cluster diff --git a/docs/java-rest/high-level/miscellaneous/ping.asciidoc b/docs/java-rest/high-level/miscellaneous/ping.asciidoc new file mode 100644 index 0000000000000..6cff46a62c5eb --- /dev/null +++ b/docs/java-rest/high-level/miscellaneous/ping.asciidoc @@ -0,0 +1,13 @@ +[[java-rest-high-ping]] +=== Ping API + +[[java-rest-high-ping-request]] +==== Execution + +The `ping()` method checks if the cluster is up and available to +process requests and returns a boolean: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[ping-execute] +-------------------------------------------------- diff --git a/docs/java-rest/high-level/supported-apis.asciidoc 
b/docs/java-rest/high-level/supported-apis.asciidoc index 9fb8bd8c66700..fa2f57069ba93 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -37,8 +37,10 @@ include::search/scroll.asciidoc[] The Java High Level REST Client supports the following Miscellaneous APIs: * <> +* <> include::miscellaneous/main.asciidoc[] +include::miscellaneous/ping.asciidoc[] == Indices APIs From f501139934b9cfa6ebcf6a7cf602efda5a3ede70 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 14 Mar 2018 13:38:24 -0700 Subject: [PATCH 69/89] [DOCS] Add monitoring upgrade details (#29041) --- docs/reference/upgrade/upgrade-node.asciidoc | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/reference/upgrade/upgrade-node.asciidoc b/docs/reference/upgrade/upgrade-node.asciidoc index db9d352e83184..c97b84ef67004 100644 --- a/docs/reference/upgrade/upgrade-node.asciidoc +++ b/docs/reference/upgrade/upgrade-node.asciidoc @@ -16,8 +16,15 @@ To upgrade using a zip or compressed tarball: .. Set `path.data` in `config/elasticsearch.yml` to point to your external data directory. If you are not using an external `data` directory, copy - your old data directory over to the new installation. + your old data directory over to the new installation. + ++ +-- +IMPORTANT: If you use {monitoring}, re-use the data directory when you upgrade +{es}. Monitoring identifies unique {es} nodes by using the persistent UUID, which +is stored in the data directory. + +-- .. Set `path.logs` in `config/elasticsearch.yml` to point to the location where you want to store your logs. If you do not specify this setting, - logs are stored in the directory you extracted the archive to. \ No newline at end of file + logs are stored in the directory you extracted the archive to. 
From 6de5818552b218bfdcebfb2eda6ddbe6cb82d872 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 14 Mar 2018 13:47:57 -0600 Subject: [PATCH 70/89] Decouple XContentBuilder from BytesReference (#28972) * Decouple XContentBuilder from BytesReference This commit removes all mentions of `BytesReference` from `XContentBuilder`. This is needed so that we can completely decouple the XContent code and move it into its own dependency. While this change appears large, it is due to two main changes, moving `.bytes()` and `.string()` out of XContentBuilder itself into static methods `BytesReference.bytes` and `Strings.toString` respectively. The rest of the change is code reacting to these changes (the majority of it in tests). Relates to #28504 --- .../org/elasticsearch/client/Request.java | 4 +- .../java/org/elasticsearch/client/CrudIT.java | 3 +- .../client/RestHighLevelClientTests.java | 3 +- .../org/elasticsearch/client/SearchIT.java | 3 +- .../documentation/CRUDDocumentationIT.java | 4 +- .../MigrationDocumentationIT.java | 5 +- .../common/GrokProcessorGetActionTests.java | 3 +- .../ingest/common/JsonProcessorTests.java | 3 +- .../mustache/CustomMustacheFactory.java | 3 +- .../mustache/RestSearchTemplateAction.java | 3 +- .../mustache/SearchTemplateResponse.java | 5 +- .../mustache/MultiSearchTemplateIT.java | 5 +- .../script/mustache/MustacheTests.java | 5 +- .../script/mustache/SearchTemplateIT.java | 4 +- ...angeFieldQueryStringQueryBuilderTests.java | 5 +- .../mapper/ScaledFloatFieldMapperTests.java | 158 ++--- .../mapper/TokenCountFieldMapperTests.java | 21 +- .../mapper/ParentJoinFieldMapperTests.java | 118 ++-- .../join/query/HasChildQueryBuilderTests.java | 3 +- .../query/HasParentQueryBuilderTests.java | 3 +- .../LegacyHasChildQueryBuilderTests.java | 9 +- .../LegacyHasParentQueryBuilderTests.java | 13 +- .../LegacyParentIdQueryBuilderTests.java | 9 +- .../join/query/ParentChildTestCase.java | 3 +- .../join/query/ParentIdQueryBuilderTests.java | 6 +- 
.../percolator/PercolateQueryBuilder.java | 4 +- .../percolator/PercolatorFieldMapper.java | 2 +- .../percolator/CandidateQueryTests.java | 9 +- .../PercolateQueryBuilderTests.java | 11 +- .../PercolatorFieldMapperTests.java | 118 ++-- .../percolator/PercolatorQuerySearchIT.java | 88 +-- .../PercolatorQuerySearchTests.java | 20 +- .../index/rankeval/RankEvalResponseTests.java | 2 +- .../index/rankeval/RankEvalSpecTests.java | 5 +- .../AbstractBulkByQueryRestHandler.java | 4 +- .../index/reindex/RestReindexAction.java | 7 +- .../index/reindex/TransportReindexAction.java | 4 +- .../reindex/remote/RemoteRequestBuilders.java | 10 +- .../reindex/remote/RemoteResponseParsers.java | 3 +- .../index/reindex/RestReindexActionTests.java | 4 +- .../rest/Netty4HeadBodyIsEmptyIT.java | 11 +- .../ICUCollationKeywordFieldMapperTests.java | 176 +++--- .../murmur3/Murmur3FieldMapperTests.java | 30 +- .../index/mapper/size/SizeMappingTests.java | 40 +- .../AzureStorageSettingsFilterTests.java | 3 +- .../gcs/GoogleCloudStorageTestServer.java | 5 +- ...rossClusterSearchUnavailableClusterIT.java | 3 +- .../upgrades/FullClusterRestartIT.java | 25 +- .../elasticsearch/backwards/IndexingIT.java | 19 +- .../elasticsearch/bwc/QueryBuilderBWCIT.java | 3 +- .../http/ContextAndHeaderTransportIT.java | 5 +- .../elasticsearch/http/DeprecationHttpIT.java | 3 +- .../org/elasticsearch/wildfly/WildflyIT.java | 3 +- .../cluster/node/info/NodesInfoResponse.java | 4 +- .../node/stats/NodesStatsResponse.java | 3 +- .../node/usage/NodesUsageResponse.java | 6 +- .../put/PutRepositoryRequest.java | 3 +- .../ClusterUpdateSettingsRequest.java | 9 +- .../create/CreateSnapshotRequest.java | 2 +- .../restore/RestoreSnapshotRequest.java | 4 +- .../cluster/stats/ClusterStatsResponse.java | 3 +- .../action/admin/indices/alias/Alias.java | 11 +- .../indices/alias/IndicesAliasesRequest.java | 9 +- .../admin/indices/analyze/AnalyzeRequest.java | 2 +- .../indices/create/CreateIndexRequest.java | 22 +- 
.../mapping/get/GetFieldMappingsResponse.java | 5 +- .../mapping/put/PutMappingRequest.java | 8 +- .../settings/put/UpdateSettingsRequest.java | 3 +- .../indices/stats/IndicesStatsResponse.java | 3 +- .../template/put/PutIndexTemplateRequest.java | 11 +- .../action/index/IndexRequest.java | 5 +- .../action/search/MultiSearchRequest.java | 4 +- .../termvectors/TermVectorsRequest.java | 2 +- .../termvectors/TermVectorsResponse.java | 3 +- .../action/update/UpdateHelper.java | 2 +- .../cluster/metadata/AliasMetaData.java | 9 +- .../metadata/IndexTemplateMetaData.java | 5 +- .../cluster/metadata/MappingMetaData.java | 3 +- .../cluster/metadata/MetaData.java | 2 +- .../org/elasticsearch/common/Strings.java | 13 +- .../common/bytes/BytesReference.java | 17 + .../common/document/DocumentField.java | 11 +- .../common/settings/Setting.java | 4 +- .../common/settings/Settings.java | 2 +- .../common/settings/SettingsModule.java | 3 +- .../common/xcontent/AbstractObjectParser.java | 2 +- .../common/xcontent/XContentBuilder.java | 68 +-- .../common/xcontent/XContentHelper.java | 17 +- .../common/xcontent/XContentParser.java | 1 - .../elasticsearch/index/get/GetResult.java | 2 +- .../index/get/ShardGetService.java | 2 +- .../index/mapper/MapperService.java | 3 +- .../elasticsearch/index/mapper/Mapping.java | 3 +- .../index/query/MoreLikeThisQueryBuilder.java | 11 +- .../functionscore/DecayFunctionBuilder.java | 6 +- .../functionscore/DecayFunctionParser.java | 2 +- .../indices/recovery/RecoveryState.java | 3 +- .../ingest/PipelineConfiguration.java | 3 +- .../elasticsearch/rest/BytesRestResponse.java | 4 +- .../java/org/elasticsearch/script/Script.java | 10 +- .../elasticsearch/script/ScriptException.java | 17 +- .../script/StoredScriptSource.java | 5 +- .../org/elasticsearch/search/SearchHit.java | 2 +- .../fetch/subphase/FetchSourceSubPhase.java | 3 +- .../searchafter/SearchAfterBuilder.java | 3 +- .../elasticsearch/search/suggest/Suggest.java | 3 +- 
.../CompletionSuggestionBuilder.java | 9 +- .../completion/context/ContextMapping.java | 3 +- .../DirectCandidateGeneratorBuilder.java | 3 +- .../org/elasticsearch/tasks/TaskResult.java | 5 +- .../ElasticsearchExceptionTests.java | 16 +- .../action/DocWriteResponseTests.java | 5 +- .../ClusterAllocationExplainActionTests.java | 3 +- .../ClusterAllocationExplainIT.java | 3 +- .../ClusterAllocationExplanationTests.java | 3 +- .../node/tasks/TransportTasksActionTests.java | 6 +- .../reroute/ClusterRerouteRequestTests.java | 3 +- .../reroute/ClusterRerouteResponseTests.java | 7 +- .../ClusterUpdateSettingsRequestTests.java | 4 +- .../indices/alias/AliasActionsTests.java | 3 +- .../create/CreateIndexRequestTests.java | 4 +- .../mapping/put/PutMappingRequestTests.java | 3 +- .../rollover/RolloverRequestTests.java | 2 +- .../IndicesShardStoreResponseTests.java | 4 +- .../MetaDataIndexTemplateServiceTests.java | 7 +- .../put/PutIndexTemplateRequestTests.java | 5 +- .../action/bulk/BulkItemResponseTests.java | 2 +- .../action/bulk/BulkProcessorIT.java | 5 +- .../action/get/GetResponseTests.java | 2 +- .../ingest/WriteableIngestDocumentTests.java | 3 +- .../action/main/MainResponseTests.java | 3 +- .../search/ClearScrollRequestTests.java | 3 +- .../SearchPhaseExecutionExceptionTests.java | 2 +- .../search/SearchScrollRequestTests.java | 3 +- ...ultShardOperationFailedExceptionTests.java | 3 +- .../replication/ReplicationResponseTests.java | 2 +- .../action/update/UpdateRequestTests.java | 3 +- .../metadata/IndexCreationTaskTests.java | 5 +- .../cluster/metadata/IndexGraveyardTests.java | 3 +- .../cluster/metadata/IndexMetaDataTests.java | 3 +- .../metadata/IndexTemplateMetaDataTests.java | 2 +- .../cluster/metadata/MetaDataTests.java | 12 +- .../cluster/routing/AllocationIdTests.java | 2 +- .../common/geo/GeoJsonShapeParserTests.java | 69 +-- .../common/settings/SettingsFilterTests.java | 3 +- .../common/settings/SettingsTests.java | 7 +- 
.../common/xcontent/BaseXContentTestCase.java | 43 +- .../ConstructingObjectParserTests.java | 21 +- .../common/xcontent/ObjectParserTests.java | 10 +- .../common/xcontent/XContentFactoryTests.java | 9 +- .../common/xcontent/XContentParserTests.java | 12 +- .../builder/XContentBuilderTests.java | 58 +- .../cbor/CborXContentParserTests.java | 2 +- .../support/AbstractFilteringTestCase.java | 22 +- .../support/XContentMapValuesTests.java | 31 +- .../AbstractXContentFilteringTestCase.java | 8 +- .../discovery/zen/ZenDiscoveryIT.java | 3 +- .../document/DocumentActionsIT.java | 13 +- .../gateway/RecoveryFromGatewayIT.java | 9 +- .../org/elasticsearch/get/GetActionIT.java | 16 +- .../index/IndexServiceTests.java | 3 +- .../org/elasticsearch/index/IndexTests.java | 3 +- .../index/IndexingSlowLogTests.java | 2 +- .../index/analysis/PreBuiltAnalyzerTests.java | 5 +- .../fielddata/BinaryDVFieldDataTests.java | 14 +- .../index/mapper/AllFieldMapperTests.java | 3 +- .../index/mapper/BinaryFieldMapperTests.java | 22 +- .../index/mapper/BooleanFieldMapperTests.java | 83 +-- .../index/mapper/CamelCaseFieldNameTests.java | 17 +- .../mapper/CompletionFieldMapperTests.java | 254 ++++---- .../mapper/CopyToMapperIntegrationIT.java | 5 +- .../index/mapper/CopyToMapperTests.java | 99 ++-- .../index/mapper/DateFieldMapperTests.java | 194 +++--- .../mapper/DocumentMapperMergeTests.java | 117 ++-- .../mapper/DocumentMapperParserTests.java | 19 +- .../index/mapper/DocumentParserTests.java | 551 +++++++++--------- .../index/mapper/DoubleIndexingDocTests.java | 26 +- .../index/mapper/DynamicMappingTests.java | 231 ++++---- .../mapper/DynamicMappingVersionTests.java | 16 +- .../index/mapper/DynamicTemplateTests.java | 9 +- .../index/mapper/DynamicTemplatesTests.java | 11 +- .../mapper/ExternalFieldMapperTests.java | 48 +- .../mapper/FieldNamesFieldMapperTests.java | 60 +- .../mapper/GeoPointFieldMapperTests.java | 150 ++--- .../mapper/GeoShapeFieldMapperTests.java | 85 +-- 
.../index/mapper/IdFieldMapperTests.java | 8 +- .../index/mapper/IndexFieldMapperTests.java | 27 +- .../index/mapper/IpFieldMapperTests.java | 124 ++-- .../index/mapper/IpRangeFieldMapperTests.java | 10 +- .../mapper/JavaMultiFieldMergeTests.java | 6 +- .../index/mapper/KeywordFieldMapperTests.java | 180 +++--- .../index/mapper/MapperServiceTests.java | 36 +- .../index/mapper/MapperTests.java | 5 +- .../mapper/MultiFieldCopyToMapperTests.java | 3 +- .../MultiFieldIncludeInAllMapperTests.java | 3 +- .../index/mapper/MultiFieldTests.java | 13 +- .../index/mapper/NestedObjectMapperTests.java | 220 +++---- .../mapper/NullValueObjectMappingTests.java | 44 +- .../index/mapper/NullValueTests.java | 6 +- .../index/mapper/NumberFieldMapperTests.java | 191 +++--- .../index/mapper/ObjectMapperTests.java | 159 +++-- .../index/mapper/ParentFieldMapperTests.java | 46 +- .../index/mapper/RangeFieldMapperTests.java | 100 ++-- ...angeFieldQueryStringQueryBuilderTests.java | 5 +- .../index/mapper/RootObjectMapperTests.java | 49 +- .../index/mapper/RoutingFieldMapperTests.java | 26 +- .../index/mapper/SourceFieldMapperTests.java | 100 ++-- .../mapper/StoredNumericValuesTests.java | 69 ++- .../index/mapper/TextFieldMapperTests.java | 227 ++++---- .../mapper/UpdateMappingOnClusterIT.java | 5 +- .../index/mapper/UpdateMappingTests.java | 32 +- .../query/GeoShapeQueryBuilderTests.java | 3 +- .../index/query/MatchQueryBuilderTests.java | 5 +- .../query/MoreLikeThisQueryBuilderTests.java | 3 +- .../index/query/NestedQueryBuilderTests.java | 5 +- .../query/QueryStringQueryBuilderTests.java | 9 +- .../index/query/RangeQueryRewriteTests.java | 9 +- .../query/SpanFirstQueryBuilderTests.java | 5 +- .../index/query/SpanNotQueryBuilderTests.java | 9 +- .../index/query/TermsQueryBuilderTests.java | 7 +- .../query/TermsSetQueryBuilderTests.java | 6 +- .../FunctionScoreQueryBuilderTests.java | 13 +- .../index/reindex/BulkByScrollTaskTests.java | 7 +- .../search/geo/GeoPointParsingTests.java | 17 +- 
.../index/shard/IndexShardTests.java | 7 +- .../shard/PrimaryReplicaSyncerTests.java | 3 +- .../index/similarity/SimilarityTests.java | 37 +- .../snapshots/blobstore/FileInfoTests.java | 4 +- .../index/translog/TranslogTests.java | 3 +- .../mapping/SimpleGetFieldMappingsIT.java | 9 +- .../RandomExceptionCircuitBreakerIT.java | 33 +- .../indices/state/OpenCloseIndexIT.java | 5 +- .../elasticsearch/ingest/IngestClientIT.java | 27 +- ...gestProcessorNotInstalledOnAllNodesIT.java | 4 +- .../ingest/PipelineConfigurationTests.java | 2 +- .../org/elasticsearch/mget/SimpleMgetIT.java | 4 +- .../nodesinfo/NodeInfoStreamingTests.java | 3 +- .../repositories/IndexIdTests.java | 3 +- .../repositories/RepositoryDataTests.java | 3 +- .../rest/BytesRestResponseTests.java | 2 +- .../rest/RestControllerTests.java | 4 +- .../rest/action/RestMainActionTests.java | 2 +- .../script/ScriptMetaDataTests.java | 21 +- .../script/ScriptServiceTests.java | 4 +- .../org/elasticsearch/script/ScriptTests.java | 3 +- .../script/StoredScriptSourceTests.java | 8 +- .../script/StoredScriptTests.java | 39 +- .../search/NestedIdentityTests.java | 5 +- .../elasticsearch/search/SearchHitTests.java | 3 +- .../elasticsearch/search/SearchHitsTests.java | 3 +- .../search/SearchSortValuesTests.java | 3 +- .../aggregations/AggregationsTests.java | 2 +- .../AggregatorFactoriesTests.java | 2 +- .../aggregations/FiltersAggsRewriteIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 3 +- .../SignificantTermsSignificanceScoreIT.java | 3 +- .../bucket/histogram/ExtendedBoundsTests.java | 4 +- .../SignificanceHeuristicTests.java | 3 +- .../metrics/AbstractGeoTestCase.java | 3 +- .../metrics/InternalStatsTests.java | 2 +- .../basic/SearchWithRandomExceptionsIT.java | 5 +- .../basic/SearchWithRandomIOExceptionsIT.java | 5 +- .../builder/SearchSourceBuilderTests.java | 4 +- .../subphase/FetchSourceSubPhaseTests.java | 3 +- .../highlight/HighlightFieldTests.java | 5 +- .../highlight/HighlighterSearchIT.java | 
9 +- .../search/fields/SearchFieldsIT.java | 200 +++---- .../elasticsearch/search/geo/GeoFilterIT.java | 13 +- .../search/geo/GeoShapeIntegrationIT.java | 29 +- .../search/geo/GeoShapeQueryTests.java | 30 +- .../ShardSearchTransportRequestTests.java | 2 +- .../search/morelikethis/MoreLikeThisIT.java | 17 +- .../search/profile/ProfileResultTests.java | 9 +- .../profile/query/CollectorResultTests.java | 9 +- .../elasticsearch/search/query/ExistsIT.java | 3 +- .../search/query/SimpleQueryStringIT.java | 10 +- .../search/scroll/SearchScrollIT.java | 3 +- .../searchafter/SearchAfterBuilderTests.java | 3 +- .../search/slice/SearchSliceIT.java | 5 +- .../search/sort/SimpleSortIT.java | 9 +- .../search/sort/SortBuilderTests.java | 3 +- .../suggest/CompletionSuggestSearchIT.java | 23 +- .../search/suggest/SuggestSearchIT.java | 71 +-- .../search/suggest/SuggestTests.java | 2 +- .../CategoryContextMappingTests.java | 241 ++++---- .../completion/GeoContextMappingTests.java | 136 ++--- .../SharedClusterSnapshotRestoreIT.java | 4 +- .../snapshots/SnapshotRequestsTests.java | 4 +- .../elasticsearch/tasks/TaskResultTests.java | 3 +- .../ThreadPoolSerializationTests.java | 5 +- .../RemoteClusterConnectionTests.java | 5 +- .../elasticsearch/update/UpdateNoopIT.java | 3 +- .../AbstractNumericFieldMapperTestCase.java | 5 +- .../search/RandomSearchRequestGenerator.java | 3 +- .../test/AbstractQueryTestCase.java | 8 +- .../elasticsearch/test/ESIntegTestCase.java | 3 +- .../org/elasticsearch/test/ESTestCase.java | 8 +- .../org/elasticsearch/test/RandomObjects.java | 9 +- .../elasticsearch/test/XContentTestUtils.java | 6 +- .../test/rest/ESRestTestCase.java | 2 +- .../yaml/ClientYamlTestExecutionContext.java | 3 +- .../rest/yaml/ClientYamlTestResponse.java | 3 +- .../test/XContentTestUtilsTests.java | 23 +- .../ElasticsearchAssertionsTests.java | 17 +- .../test/rest/yaml/ObjectPathTests.java | 27 +- .../test/test/ESTestCaseTests.java | 2 +- 306 files changed, 3577 insertions(+), 3375 
deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index 89f149899485d..7c36d0affcf97 100755 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -330,7 +330,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { } metadata.endObject(); - BytesRef metadataSource = metadata.bytes().toBytesRef(); + BytesRef metadataSource = BytesReference.bytes(metadata).toBytesRef(); content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length); content.write(separator); } @@ -345,7 +345,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) { try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) { builder.copyCurrentStructure(parser); - source = builder.bytes().toBytesRef(); + source = BytesReference.bytes(builder).toBytesRef(); } } } else if (opType == DocWriteRequest.OpType.UPDATE) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 559dded4f4d7e..352a6a5e61d1b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -617,7 +617,8 @@ public void testBulk() throws IOException { bulkRequest.add(deleteRequest); } else { - BytesReference source = XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject().bytes(); + BytesReference source = BytesReference.bytes(XContentBuilder.builder(xContentType.xContent()) + .startObject().field("id", i).endObject()); if (opType == DocWriteRequest.OpType.INDEX) { IndexRequest indexRequest = new 
IndexRequest("index", "test", id).source(source, xContentType); if (erroneous) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index c3d73d11ca2cd..242cec5e04c12 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -55,6 +55,7 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -272,7 +273,7 @@ private static HttpEntity createBinaryEntity(XContentBuilder xContentBuilder, Co builder.startObject(); builder.field("field", "value"); builder.endObject(); - return new ByteArrayEntity(builder.bytes().toBytesRef().bytes, contentType); + return new ByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index ca86954cd1be2..a762eadcb54d2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.index.query.MatchQueryBuilder; @@ -478,7 +479,7 @@ public void testSearchScroll() throws Exception { for (int i = 0; i < 100; i++) { XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); - HttpEntity entity = new NStringEntity(builder.string(), ContentType.APPLICATION_JSON); + HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity); } client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 16fa4f8d69cfb..95e5364756424 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -266,13 +266,13 @@ public void testUpdate() throws Exception { assertSame(indexResponse.status(), RestStatus.CREATED); XContentType xContentType = XContentType.JSON; - String script = XContentBuilder.builder(xContentType.xContent()) + String script = Strings.toString(XContentBuilder.builder(xContentType.xContent()) .startObject() .startObject("script") .field("lang", "painless") .field("code", "ctx._source.field += params.count") .endObject() - .endObject().string(); + .endObject()); HttpEntity body = new NStringEntity(script, ContentType.create(xContentType.mediaType())); Response response = client().performRequest(HttpPost.METHOD_NAME, "/_scripts/increment-field", emptyMap(), body); assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java index 1a8b635234e5c..fe9e8108acee3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java @@ -33,6 +33,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -75,7 +76,7 @@ public void testCreateIndex() throws IOException { .put(SETTING_NUMBER_OF_REPLICAS, 0) .build(); - String payload = XContentFactory.jsonBuilder() // <2> + String payload = Strings.toString(XContentFactory.jsonBuilder() // <2> .startObject() .startObject("settings") // <3> .value(indexSettings) @@ -89,7 +90,7 @@ public void testCreateIndex() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); // <5> diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java index aa54044f8454c..cc8ca33161be4 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorGetActionTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; @@ -63,7 +64,7 @@ public void testResponseToXContent() throws Exception { GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(TEST_PATTERNS); try (XContentBuilder builder = JsonXContent.contentBuilder()) { response.toXContent(builder, ToXContent.EMPTY_PARAMS); - Map converted = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); Map patterns = (Map) converted.get("patterns"); assertThat(patterns.size(), equalTo(1)); assertThat(patterns.get("PATTERN"), equalTo("foo")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index ef17935962d0e..245285259b47a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -48,7 +49,7 @@ public void testExecute() throws Exception { Map randomJsonMap = RandomDocumentPicks.randomSource(random()); XContentBuilder builder = JsonXContent.contentBuilder().map(randomJsonMap); - String randomJson = XContentHelper.convertToJson(builder.bytes(), false, XContentType.JSON); + String randomJson = XContentHelper.convertToJson(BytesReference.bytes(builder), false, XContentType.JSON); document.put(randomField, randomJson); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), 
document); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java index 799d378e05fc7..008613311f421 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java @@ -30,6 +30,7 @@ import com.github.mustachejava.codes.DefaultMustache; import com.github.mustachejava.codes.IterableCode; import com.github.mustachejava.codes.WriteCode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -215,7 +216,7 @@ protected Function createFunction(Object resolved) { // Do not handle as JSON return oh.stringify(resolved); } - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { throw new MustacheException("Failed to convert object to JSON", e); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index c3303cc30b528..7ab9aa6003334 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -63,7 +64,7 @@ public class RestSearchTemplateAction extends BaseRestHandler 
{ if (parser.currentToken() == XContentParser.Token.START_OBJECT) { //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - request.setScript(builder.copyCurrentStructure(parser).string()); + request.setScript(Strings.toString(builder.copyCurrentStructure(parser))); } catch (IOException e) { throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 22d7da774eb7c..792d993915992 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -30,6 +30,7 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.io.InputStream; public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject { @@ -83,7 +84,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } else { builder.startObject(); //we can assume the template is always json as we convert it before compiling it - builder.rawField("template_output", source, XContentType.JSON); + try (InputStream stream = source.streamInput()) { + builder.rawField("template_output", stream, XContentType.JSON); + } builder.endObject(); } return builder; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java index 91fc4db43dddd..be8be1b9c5480 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java +++ 
b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptType; @@ -61,13 +62,13 @@ public void testBasic() throws Exception { } indexRandom(true, indexRequestBuilders); - final String template = jsonBuilder().startObject() + final String template = Strings.toString(jsonBuilder().startObject() .startObject("query") .startObject("{{query_type}}") .field("{{field_name}}", "{{field_value}}") .endObject() .endObject() - .endObject().string(); + .endObject()); MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index d54063df8b4bf..ba59e9ccac002 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -30,6 +30,7 @@ import java.util.Set; import com.github.mustachejava.MustacheException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.script.ScriptEngine; @@ -248,7 +249,7 @@ public void testEmbeddedToJSON() throws Exception { .endObject(); Map ctx = - singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2()); + singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2()); 
assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx, equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}")); @@ -290,7 +291,7 @@ public void testEmbeddedArrayJoin() throws Exception { .endObject(); Map ctx = - singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2()); + singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2()); assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx, equalTo("john@smith.com,john.smith@email.com,jsmith@email.com")); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index 69739ff2cb8ef..1529b655a5042 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.plugins.Plugin; @@ -317,7 +318,8 @@ public void testIndexedTemplateWithArray() throws Exception { assertAcked( client().admin().cluster().preparePutStoredScript() .setId("4") - .setContent(jsonBuilder().startObject().field("template", multiQuery).endObject().bytes(), XContentType.JSON) + .setContent(BytesReference.bytes(jsonBuilder().startObject().field("template", multiQuery).endObject()), + XContentType.JSON) ); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); bulkRequestBuilder.add(client().prepareIndex("test", "type", 
"1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index 654da9ae67b15..090476703dbdd 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; @@ -65,14 +66,14 @@ protected Collection> getPlugins() { @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { - mapperService.merge("_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("_doc", + mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", INTEGER_RANGE_FIELD_NAME, "type=integer_range", LONG_RANGE_FIELD_NAME, "type=long_range", FLOAT_RANGE_FIELD_NAME, "type=float_range", DOUBLE_RANGE_FIELD_NAME, "type=double_range", DATE_RANGE_FIELD_NAME, "type=date_range", IP_RANGE_FIELD_NAME, "type=ip_range" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java index 9068b2fa32afb..70d1c6169d0a6 100644 --- 
a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -21,6 +21,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -54,20 +56,20 @@ protected Collection> getPlugins() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("scaling_factor", 10.0).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -83,9 +85,9 @@ public void testDefaults() throws Exception { } public void testMissingScalingFactor() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("field").field("type", "scaled_float").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); @@ -93,10 +95,10 @@ public void testMissingScalingFactor() throws IOException { } public void testIllegalScalingFactor() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("scaling_factor", -1).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); @@ -104,20 +106,20 @@ public void testIllegalScalingFactor() throws IOException { } public void testNotIndexed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("index", false).field("scaling_factor", 10.0).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), 
XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -128,20 +130,20 @@ public void testNotIndexed() throws Exception { } public void testNoDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("doc_values", false).field("scaling_factor", 10.0).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -152,20 +154,20 @@ public void testNoDocValues() throws Exception { } public void testStore() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("store", true).field("scaling_factor", 10.0).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - 
.endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -181,20 +183,20 @@ public void testStore() throws Exception { } public void testCoerce() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("scaling_factor", 10.0).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -205,20 +207,20 @@ public void testCoerce() throws Exception { IndexableField dvField = fields[1]; assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("scaling_factor", 10.0).field("coerce", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new 
CompressedXContent(mapping)); assertEquals(mapping, mapper2.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("passed as String")); @@ -234,36 +236,36 @@ public void testIgnoreMalformed() throws Exception { } private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("scaling_factor", 10.0).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", value) - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", value) + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains)); - mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "scaled_float") .field("scaling_factor", 10.0).field("ignore_malformed", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", value) - .endObject() - .bytes(), + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", value) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -271,7 +273,7 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -279,20 +281,20 @@ public void testNullValue() throws IOException { .field("scaling_factor", 10.0) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); 
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -301,16 +303,16 @@ public void testNullValue() throws IOException { .field("null_value", 2.5) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -325,11 +327,11 @@ public void testNullValue() throws IOException { public void testEmptyName() throws IOException { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("") .field("type", "scaled_float") .field("scaling_factor", 10.0).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) @@ -341,14 +343,14 @@ public void testEmptyName() throws IOException { * `index_options` was deprecated and is rejected as of 7.0 */ public void testRejectIndexOptions() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", "scaled_float") .field("scaling_factor", 10.0) .field("index_options", randomFrom(new String[]{"docs", "freqs", "positions", "offset"})) .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); parser.parse("type", new CompressedXContent(mapping)); assertWarnings( "index_options are deprecated for field [foo] of type [scaled_float] and will be removed in the next major version."); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java index 633f10276096c..503eaab3d8d0e 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.Token; import org.apache.lucene.analysis.TokenStream; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; @@ -52,7 +53,7 @@ protected Collection> getPlugins() { } public void testMerge() throws IOException { - String stage1Mapping = XContentFactory.jsonBuilder().startObject() + String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("person") .startObject("properties") .startObject("tc") @@ -60,12 +61,12 @@ public void testMerge() throws IOException { .field("analyzer", "keyword") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); 
DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - String stage2Mapping = XContentFactory.jsonBuilder().startObject() + String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("person") .startObject("properties") .startObject("tc") @@ -73,7 +74,7 @@ public void testMerge() throws IOException { .field("analyzer", "standard") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); @@ -131,7 +132,7 @@ public TokenStreamComponents createComponents(String fieldName) { public void testEmptyName() throws IOException { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("") @@ -139,7 +140,7 @@ public void testEmptyName() throws IOException { .field("analyzer", "standard") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); // Empty name not allowed in index created after 5.0 IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -167,7 +168,7 @@ public void testParseNotNullValue() throws Exception { } private DocumentMapper createIndexWithTokenCountField() throws IOException { - final String content = XContentFactory.jsonBuilder().startObject() + final String content = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("person") .startObject("properties") .startObject("test") @@ -180,16 +181,16 @@ private DocumentMapper createIndexWithTokenCountField() throws IOException { .endObject() .endObject() 
.endObject() - .endObject().endObject().string(); + .endObject().endObject()); return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content)); } private SourceToParse createDocument(String fieldValue) throws Exception { - BytesReference request = XContentFactory.jsonBuilder() + BytesReference request = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("test", fieldValue) - .endObject().bytes(); + .endObject()); return SourceToParse.source("test", "person", "1", request, XContentType.JSON); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java index 6bfc9b87b78ce..f83f70cb3cadc 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.join.mapper; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -45,7 +47,7 @@ protected Collection> getPlugins() { } public void testSingleLevel() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("join_field") .field("type", "join") @@ -54,7 +56,7 @@ public void testSingleLevel() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), 
MapperService.MergeReason.MAPPING_UPDATE, false); @@ -62,39 +64,39 @@ public void testSingleLevel() throws Exception { // Doc without join ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0", - XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON)); + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)); assertNull(doc.rootDoc().getBinaryValue("join_field")); // Doc parent doc = docMapper.parse(SourceToParse.source("test", "type", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "parent") - .endObject().bytes(), XContentType.JSON)); + .endObject()), XContentType.JSON)); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child doc = docMapper.parse(SourceToParse.source("test", "type", "2", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", "1") .endObject() - .endObject().bytes(), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON).routing("1")); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Unkwnown join name MapperException exc = expectThrows(MapperParsingException.class, () -> docMapper.parse(SourceToParse.source("test", "type", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "unknown") - .endObject().bytes(), XContentType.JSON))); + .endObject()), XContentType.JSON))); assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]")); } public void 
testParentIdSpecifiedAsNumber() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("join_field") .field("type", "join") @@ -103,32 +105,32 @@ public void testParentIdSpecifiedAsNumber() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "2", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", 1) .endObject() - .endObject().bytes(), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON).routing("1")); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); doc = docMapper.parse(SourceToParse.source("test", "type", "2", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", 1.0) .endObject() - .endObject().bytes(), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON).routing("1")); assertEquals("1.0", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); } public void testMultipleLevels() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("join_field") .field("type", "join") @@ 
-138,7 +140,7 @@ public void testMultipleLevels() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); @@ -146,26 +148,26 @@ public void testMultipleLevels() throws Exception { // Doc without join ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0", - XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON)); + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)); assertNull(doc.rootDoc().getBinaryValue("join_field")); // Doc parent doc = docMapper.parse(SourceToParse.source("test", "type", "1", - XContentFactory.jsonBuilder() + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("join_field", "parent") - .endObject().bytes(), XContentType.JSON)); + .endObject()), XContentType.JSON)); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child doc = docMapper.parse(SourceToParse.source("test", "type", "2", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", "1") .endObject() - .endObject().bytes(), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON).routing("1")); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); @@ -173,44 +175,44 @@ public void testMultipleLevels() throws Exception { // Doc child missing parent MapperException exc = 
expectThrows(MapperParsingException.class, () -> docMapper.parse(SourceToParse.source("test", "type", "2", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "child") - .endObject().bytes(), XContentType.JSON).routing("1"))); + .endObject()), XContentType.JSON).routing("1"))); assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]")); // Doc child missing routing exc = expectThrows(MapperParsingException.class, () -> docMapper.parse(SourceToParse.source("test", "type", "2", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", "1") .endObject() - .endObject().bytes(), XContentType.JSON))); + .endObject()), XContentType.JSON))); assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]")); // Doc grand_child doc = docMapper.parse(SourceToParse.source("test", "type", "3", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "grand_child") .field("parent", "2") .endObject() - .endObject().bytes(), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON).routing("1")); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Unkwnown join name exc = expectThrows(MapperParsingException.class, () -> docMapper.parse(SourceToParse.source("test", "type", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "unknown") - .endObject().bytes(), XContentType.JSON))); + .endObject()), XContentType.JSON))); assertThat(exc.getRootCause().getMessage(), 
containsString("unknown join name [unknown] for field [join_field]")); } public void testUpdateRelations() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") @@ -218,21 +220,21 @@ public void testUpdateRelations() throws Exception { .array("child", "grand_child1", "grand_child2") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("test"); DocumentMapper docMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); { - final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") .array("child", "grand_child1", "grand_child2") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalStateException exc = expectThrows(IllegalStateException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE, false)); @@ -240,7 +242,7 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") @@ -248,7 +250,7 @@ public void 
testUpdateRelations() throws Exception { .field("child", "grand_child1") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalStateException exc = expectThrows(IllegalStateException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE, false)); @@ -256,7 +258,7 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") @@ -265,7 +267,7 @@ public void testUpdateRelations() throws Exception { .array("child", "grand_child1", "grand_child2") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalStateException exc = expectThrows(IllegalStateException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE, false)); @@ -273,7 +275,7 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") @@ -282,7 +284,7 @@ public void testUpdateRelations() throws Exception { .field("grand_child2", "grand_grand_child") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalStateException exc = expectThrows(IllegalStateException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE, false)); @@ -290,7 +292,7 @@ public void 
testUpdateRelations() throws Exception { } { - final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") @@ -298,7 +300,7 @@ public void testUpdateRelations() throws Exception { .array("child", "grand_child1", "grand_child2") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE, true); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); @@ -310,7 +312,7 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") @@ -319,7 +321,7 @@ public void testUpdateRelations() throws Exception { .array("other", "child_other1", "child_other2") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE, true); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); @@ -334,7 +336,7 @@ public void testUpdateRelations() throws Exception { } public void testInvalidJoinFieldInsideObject() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("object") .startObject("properties") .startObject("join_field") @@ -345,7 +347,7 @@ public void testInvalidJoinFieldInsideObject() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("test"); MapperParsingException exc = expectThrows(MapperParsingException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), @@ -355,7 +357,7 @@ public void testInvalidJoinFieldInsideObject() throws Exception { } public void testInvalidJoinFieldInsideMultiFields() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties") .startObject("number") .field("type", "integer") .startObject("fields") @@ -367,7 +369,7 @@ public void testInvalidJoinFieldInsideMultiFields() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("test"); MapperParsingException exc = expectThrows(MapperParsingException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), @@ -379,7 +381,7 @@ public void testInvalidJoinFieldInsideMultiFields() throws Exception { public void testMultipleJoinFields() throws Exception { IndexService indexService = createIndex("test"); { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("join_field") .field("type", "join") @@ -395,14 +397,14 @@ public void testMultipleJoinFields() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); IllegalArgumentException exc = 
expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false)); assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]")); } { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("join_field") .field("type", "join") @@ -412,16 +414,16 @@ public void testMultipleJoinFields() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - String updateMapping = XContentFactory.jsonBuilder().startObject() + String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("another_join_field") .field("type", "join") .endObject() .endObject() - .endObject().string(); + .endObject()); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE, false)); assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]")); @@ -429,7 +431,7 @@ public void testMultipleJoinFields() throws Exception { } public void testEagerGlobalOrdinals() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("join_field") .field("type", "join") @@ -439,7 +441,7 @@ public void testEagerGlobalOrdinals() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); IndexService service = createIndex("test"); DocumentMapper docMapper = 
service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); @@ -450,7 +452,7 @@ public void testEagerGlobalOrdinals() throws Exception { assertNotNull(service.mapperService().fullName("join_field#child")); assertTrue(service.mapperService().fullName("join_field#child").eagerGlobalOrdinals()); - mapping = XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("join_field") .field("type", "join") @@ -461,7 +463,7 @@ public void testEagerGlobalOrdinals() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertFalse(service.mapperService().fullName("join_field").eagerGlobalOrdinals()); diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index 885c19c6cd45a..e299edb4c2805 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -132,7 +133,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .endObject().endObject().endObject(); mapperService.merge(TYPE, - new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, 
false); + new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE, false); } /** diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java index 1776df49e1850..708e3c28b3b09 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -112,7 +113,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .endObject().endObject().endObject(); mapperService.merge(TYPE, - new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE, false); } /** diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java index d30ddf98661de..d7306628f8b78 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; 
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; @@ -89,7 +90,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws similarity = randomFrom("classic", "BM25"); // TODO: use a single type when inner hits have been changed to work with join field, // this test randomly generates queries with inner hits - mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, + mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME_2, "type=keyword", INT_FIELD_NAME, "type=integer", @@ -97,8 +98,8 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); - mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, + ))), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, "_parent", "type=" + PARENT_TYPE, STRING_FIELD_NAME, "type=text", "custom_string", "type=text,similarity=" + similarity, @@ -107,7 +108,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); } @Override diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java 
b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java index 8517348721e30..535276a35d0bb 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; @@ -80,7 +81,7 @@ protected Settings indexSettings() { protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { // TODO: use a single type when inner hits have been changed to work with join field, // this test randomly generates queries with inner hits - mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, + mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME_2, "type=keyword", INT_FIELD_NAME, "type=integer", @@ -88,8 +89,8 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); - mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, + ))), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, "_parent", "type=" + PARENT_TYPE, STRING_FIELD_NAME, "type=text", 
STRING_FIELD_NAME_2, "type=keyword", @@ -98,9 +99,9 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); - mapperService.merge("just_a_type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("just_a_type", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("just_a_type" + ))), MapperService.MergeReason.MAPPING_UPDATE, false); } /** diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java index f613e58498ace..74f3e30d63840 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.TermQuery; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; @@ -65,15 +66,15 @@ protected Settings indexSettings() { @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { - mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, + mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, STRING_FIELD_NAME, 
"type=text", INT_FIELD_NAME, "type=integer", DOUBLE_FIELD_NAME, "type=double", BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); - mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, + ))), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, "_parent", "type=" + PARENT_TYPE, STRING_FIELD_NAME, "type=text", INT_FIELD_NAME, "type=integer", @@ -81,7 +82,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", OBJECT_FIELD_NAME, "type=object" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); } @Override diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java index 5d9b03045acfe..2e2cdfb200453 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentChildTestCase.java @@ -20,6 +20,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -83,7 +84,7 @@ protected IndexRequestBuilder createIndexRequest(String index, String type, Stri protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, XContentBuilder builder) throws IOException { - Map source = 
XContentHelper.convertToMap(JsonXContent.jsonXContent, builder.string(), false); + Map source = XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(builder), false); return createIndexRequest(index, type, id, parentId, source); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java index 375923ebd9ab2..43f91fc6cd0a3 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java @@ -22,18 +22,16 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DocValuesTermsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; @@ -104,7 +102,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .endObject().endObject().endObject(); mapperService.merge(TYPE, - new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + new CompressedXContent(Strings.toString(mapping)), 
MapperService.MergeReason.MAPPING_UPDATE, false); } @Override diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 6ba8394e1e598..0c35876ada63d 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -416,7 +416,7 @@ public static PercolateQueryBuilder fromXContent(XContentParser parser) throws I try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.copyCurrentStructure(parser); builder.flush(); - documents.add(builder.bytes()); + documents.add(BytesReference.bytes(builder)); } } else { throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME + @@ -437,7 +437,7 @@ public static PercolateQueryBuilder fromXContent(XContentParser parser) throws I try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.copyCurrentStructure(parser); builder.flush(); - documents.add(builder.bytes()); + documents.add(BytesReference.bytes(builder)); } } else { throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME + diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index e44a36cd267dd..91f5b18a1114e 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -441,7 +441,7 @@ static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbFi try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) { queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); builder.flush(); 
- byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes()); + byte[] queryBuilderAsBytes = BytesReference.toBytes(BytesReference.bytes(builder)); context.doc().add(new Field(qbField.name(), queryBuilderAsBytes, qbField.fieldType())); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 762c753ba392e..fd5894fedaff6 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -75,6 +75,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -140,7 +141,7 @@ public void init() throws Exception { IndexService indexService = createIndex(indexName, Settings.EMPTY); mapperService = indexService.mapperService(); - String mapper = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("int_field").field("type", "integer").endObject() .startObject("long_field").field("type", "long").endObject() @@ -149,13 +150,13 @@ public void init() throws Exception { .startObject("double_field").field("type", "double").endObject() .startObject("ip_field").field("type", "ip").endObject() .startObject("field").field("type", "keyword").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true); String queryField = 
"query_field"; - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("type") + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField); fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 122cabc79eab6..246a5e3d0b54b 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -96,12 +97,12 @@ protected Collection> getPlugins() { protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { queryField = randomAlphaOfLength(4); String docType = "_doc"; - mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType, + mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, queryField, "type=percolator" - 
).string()), MapperService.MergeReason.MAPPING_UPDATE, false); - mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType, + ))), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, STRING_FIELD_NAME, "type=text" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); if (mapperService.getIndexSettings().isSingleType() == false) { PercolateQueryBuilderTests.docType = docType; } @@ -339,7 +340,7 @@ private static BytesReference randomSource(Set usedFields) { XContentBuilder xContent = XContentFactory.jsonBuilder(); xContent.map(source); - return xContent.bytes(); + return BytesReference.bytes(xContent); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 2adc83127765e..4a47443b9aee1 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -42,7 +42,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.hash.MurmurHash3; @@ -141,7 +143,7 @@ public void init() throws Exception { indexService = createIndex("test"); mapperService = indexService.mapperService(); - String mapper = 
XContentFactory.jsonBuilder().startObject().startObject("doc") + String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") .startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("properties") .startObject("field").field("type", "text").endObject() @@ -157,15 +159,15 @@ public void init() throws Exception { .startObject("number_field6").field("type", "double").endObject() .startObject("number_field7").field("type", "ip").endObject() .startObject("date_field").field("type", "date").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, false); } private void addQueryFieldMappings() throws Exception { fieldName = randomAlphaOfLength(4); - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc") + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") .startObject("properties").startObject(fieldName).field("type", "percolator").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, false); fieldType = (PercolatorFieldMapper.FieldType) mapperService.fullName(fieldName); } @@ -459,12 +461,13 @@ public void testExtractTermsAndRanges_numberFields() throws Exception { public void testPercolatorFieldMapper() throws Exception { addQueryFieldMappings(); QueryBuilder queryBuilder = termQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, queryBuilder) - .endObject().bytes(), - XContentType.JSON)); + ParsedDocument doc = 
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + BytesReference.bytes(XContentFactory + .jsonBuilder() + .startObject() + .field(fieldName, queryBuilder) + .endObject()), + XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value")); @@ -477,11 +480,11 @@ public void testPercolatorFieldMapper() throws Exception { // add an query for which we don't extract terms from queryBuilder = rangeQuery("field").from("a").to("z"); - doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory + doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .field(fieldName, queryBuilder) - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), @@ -504,9 +507,9 @@ public void testStoringQueries() throws Exception { for (QueryBuilder query : queries) { ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, query) - .endObject().bytes(), + .endObject()), XContentType.JSON)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, query); @@ -517,12 +520,13 @@ public void testQueryWithRewrite() throws Exception { addQueryFieldMappings(); client().prepareIndex("remote", "doc", "1").setSource("field", "value").get(); QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "doc", 
"1", "field")); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory - .jsonBuilder() - .startObject() - .field(fieldName, queryBuilder) - .endObject().bytes(), - XContentType.JSON)); + ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + BytesReference.bytes(XContentFactory + .jsonBuilder() + .startObject() + .field(fieldName, queryBuilder) + .endObject()), + XContentType.JSON)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); QueryShardContext shardContext = indexService.newQueryShardContext( randomInt(20), null, () -> { @@ -537,11 +541,11 @@ public void testQueryWithRewrite() throws Exception { public void testPercolatorFieldMapperUnMappedField() throws Exception { addQueryFieldMappings(); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory + mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .field(fieldName, termQuery("unmapped_field", "value")) - .endObject().bytes(), + .endObject()), XContentType.JSON)); }); assertThat(exception.getCause(), instanceOf(QueryShardException.class)); @@ -551,20 +555,20 @@ public void testPercolatorFieldMapperUnMappedField() throws Exception { public void testPercolatorFieldMapper_noQuery() throws Exception { addQueryFieldMappings(); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory - .jsonBuilder() - .startObject() - .endObject() - .bytes(), + ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference + .bytes(XContentFactory + .jsonBuilder() + .startObject() + .endObject()), XContentType.JSON)); 
assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { - mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory + mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .nullField(fieldName) - .endObject().bytes(), + .endObject()), XContentType.JSON)); } catch (MapperParsingException e) { assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object")); @@ -576,9 +580,9 @@ public void testAllowNoAdditionalSettings() throws Exception { IndexService indexService = createIndex("test1", Settings.EMPTY); MapperService mapperService = indexService.mapperService(); - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc") + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc") .startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true)); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); @@ -587,21 +591,21 @@ public void testAllowNoAdditionalSettings() throws Exception { // multiple percolator fields are allowed in the mapping, but only one field can be used at index time. 
public void testMultiplePercolatorFields() throws Exception { String typeName = "doc"; - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName) .startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("properties") .startObject("query_field1").field("type", "percolator").endObject() .startObject("query_field2").field("type", "percolator").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", - jsonBuilder().startObject() + BytesReference.bytes(jsonBuilder().startObject() .field("query_field1", queryBuilder) .field("query_field2", queryBuilder) - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue(); @@ -614,7 +618,7 @@ public void testMultiplePercolatorFields() throws Exception { // percolator field can be nested under an object field, but only one query can be specified per document public void testNestedPercolatorField() throws Exception { String typeName = "doc"; - String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) + String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName) .startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("properties") .startObject("object_field") @@ -624,25 +628,25 @@ public void 
testNestedPercolatorField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); QueryBuilder queryBuilder = matchQuery("field", "value"); ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", - jsonBuilder().startObject().startObject("object_field") + BytesReference.bytes(jsonBuilder().startObject().startObject("object_field") .field("query_field", queryBuilder) - .endObject().endObject().bytes(), + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", - jsonBuilder().startObject() + BytesReference.bytes(jsonBuilder().startObject() .startArray("object_field") .startObject().field("query_field", queryBuilder).endObject() .endArray() - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); @@ -650,12 +654,12 @@ public void testNestedPercolatorField() throws Exception { MapperParsingException e = expectThrows(MapperParsingException.class, () -> { mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", - jsonBuilder().startObject() + BytesReference.bytes(jsonBuilder().startObject() .startArray("object_field") .startObject().field("query_field", queryBuilder).endObject() .startObject().field("query_field", queryBuilder).endObject() 
.endArray() - .endObject().bytes(), + .endObject()), XContentType.JSON)); } ); @@ -708,9 +712,9 @@ private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws I } public void testEmptyName() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("").field("type", "percolator").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = mapperService.documentMapperParser(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -735,9 +739,9 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() - .rawField(fieldName, new BytesArray(query.string()), query.contentType()) - .endObject().bytes(), + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) + .endObject()), XContentType.JSON)); BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) { @@ -773,9 +777,9 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() - .rawField(fieldName, new BytesArray(query.string()), query.contentType()) - .endObject().bytes(), + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) + 
.endObject()), XContentType.JSON)); querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) { @@ -859,9 +863,9 @@ public void testDuplicatedClauses() throws Exception { .must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))); ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) - .endObject().bytes(), + .endObject()), XContentType.JSON)); List values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) @@ -881,9 +885,9 @@ public void testDuplicatedClauses() throws Exception { .must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4"))) .must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) - .endObject().bytes(), + .endObject()), XContentType.JSON)); values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) @@ -906,9 +910,9 @@ public void testDuplicatedClauses() throws Exception { .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", - XContentFactory.jsonBuilder().startObject() + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) - 
.endObject().bytes(), + .endObject()), XContentType.JSON)); values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index db2d85b9e39e9..3e11f91c4bcea 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -85,7 +85,7 @@ public void testPercolatorQuery() throws Exception { ).endObject()).get(); client().admin().indices().prepareRefresh().get(); - BytesReference source = jsonBuilder().startObject().endObject().bytes(); + BytesReference source = BytesReference.bytes(jsonBuilder().startObject().endObject()); logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) @@ -93,7 +93,7 @@ public void testPercolatorQuery() throws Exception { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - source = jsonBuilder().startObject().field("field1", "value").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); logger.info("percolating doc with 1 field"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) @@ -105,7 +105,7 @@ public void testPercolatorQuery() throws Exception { assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); - source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 
"value").field("field2", "value").endObject()); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) @@ -122,8 +122,8 @@ public void testPercolatorQuery() throws Exception { logger.info("percolating doc with 2 fields"); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( - jsonBuilder().startObject().field("field1", "value").endObject().bytes(), - jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes() + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()) ), XContentType.JSON)) .addSort("_uid", SortOrder.ASC) .get(); @@ -189,7 +189,7 @@ public void testPercolatorRangeQueries() throws Exception { client().admin().indices().prepareRefresh().get(); // Test long range: - BytesReference source = jsonBuilder().startObject().field("field1", 12).endObject().bytes(); + BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -198,7 +198,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); - source = jsonBuilder().startObject().field("field1", 11).endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 11).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -206,7 +206,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("1")); // Test 
double range: - source = jsonBuilder().startObject().field("field2", 12).endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 12).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -214,7 +214,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("6")); assertThat(response.getHits().getAt(1).getId(), equalTo("4")); - source = jsonBuilder().startObject().field("field2", 11).endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 11).endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -222,7 +222,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("4")); // Test IP range: - source = jsonBuilder().startObject().field("field3", "192.168.1.5").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.5").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -230,7 +230,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("9")); assertThat(response.getHits().getAt(1).getId(), equalTo("7")); - source = jsonBuilder().startObject().field("field3", "192.168.1.4").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.4").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -238,7 +238,7 @@ public void testPercolatorRangeQueries() throws Exception { assertThat(response.getHits().getAt(0).getId(), equalTo("7")); // Test date range: - source = 
jsonBuilder().startObject().field("field4", "2016-05-15").endObject().bytes(); + source = BytesReference.bytes(jsonBuilder().startObject().field("field4", "2016-05-15").endObject()); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); @@ -267,9 +267,9 @@ public void testPercolatorGeoQueries() throws Exception { .endObject()).get(); refresh(); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject() - .endObject().bytes(); + .endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("_id", SortOrder.ASC) @@ -402,10 +402,10 @@ public void testPercolatorSpecificQueries() throws Exception { .get(); client().admin().indices().prepareRefresh().get(); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "the quick brown fox jumps over the lazy dog") .field("field2", "the quick brown fox falls down into the well") - .endObject().bytes(); + .endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("_uid", SortOrder.ASC) @@ -449,9 +449,9 @@ public void testPercolatorQueryWithHighlighting() throws Exception { .execute().actionGet(); client().admin().indices().prepareRefresh().get(); - BytesReference document = jsonBuilder().startObject() + BytesReference document = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "The quick brown fox jumps over the lazy dog") - .endObject().bytes(); + .endObject()); SearchResponse searchResponse = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) .highlighter(new HighlightBuilder().field("field1")) @@ 
-470,12 +470,12 @@ public void testPercolatorQueryWithHighlighting() throws Exception { assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy dog")); - BytesReference document1 = jsonBuilder().startObject() + BytesReference document1 = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "The quick brown fox jumps") - .endObject().bytes(); - BytesReference document2 = jsonBuilder().startObject() + .endObject()); + BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject() .field("field1", "over the lazy dog") - .endObject().bytes(); + .endObject()); searchResponse = client().prepareSearch() .setQuery(boolQuery() .should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) @@ -500,10 +500,10 @@ public void testPercolatorQueryWithHighlighting() throws Exception { searchResponse = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( - jsonBuilder().startObject().field("field1", "dog").endObject().bytes(), - jsonBuilder().startObject().field("field1", "fox").endObject().bytes(), - jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(), - jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes() + BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) ), XContentType.JSON)) .highlighter(new HighlightBuilder().field("field1")) .addSort("_uid", SortOrder.ASC) @@ -537,12 +537,12 @@ public void testPercolatorQueryWithHighlighting() throws Exception { searchResponse = client().prepareSearch() .setQuery(boolQuery() .should(new 
PercolateQueryBuilder("query", Arrays.asList( - jsonBuilder().startObject().field("field1", "dog").endObject().bytes(), - jsonBuilder().startObject().field("field1", "fox").endObject().bytes() + BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()) ), XContentType.JSON).setName("query1")) .should(new PercolateQueryBuilder("query", Arrays.asList( - jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(), - jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes() + BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) ), XContentType.JSON).setName("query2")) ) .highlighter(new HighlightBuilder().field("field1")) @@ -664,7 +664,7 @@ public void testWithMultiplePercolatorFields() throws Exception { .get(); client().admin().indices().prepareRefresh().get(); - BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes(); + BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field", "value").endObject()); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON)) .setIndices("test1") @@ -718,13 +718,13 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", - XContentFactory.jsonBuilder() + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "virginia potts").endObject() .startObject().field("name", "tony stark").endObject() .endArray() - .endObject().bytes(), XContentType.JSON)) + .endObject()), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) .get(); 
assertHitCount(response, 1); @@ -732,20 +732,20 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", - XContentFactory.jsonBuilder() + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "notstark") .startArray("employee") .startObject().field("name", "virginia stark").endObject() .startObject().field("name", "tony stark").endObject() .endArray() - .endObject().bytes(), XContentType.JSON)) + .endObject()), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) .get(); assertHitCount(response, 0); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", - XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject().bytes(), + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) .get(); @@ -753,20 +753,20 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", Arrays.asList( - XContentFactory.jsonBuilder() + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "virginia potts").endObject() .startObject().field("name", "tony stark").endObject() .endArray() - .endObject().bytes(), - XContentFactory.jsonBuilder() + .endObject()), + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "peter parker").endObject() .startObject().field("name", "virginia potts").endObject() .endArray() - .endObject().bytes() + .endObject()) ), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) .get(); @@ -803,16 +803,16 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { MultiSearchResponse 
response = client().prepareMultiSearch() .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - jsonBuilder().startObject().field("field1", "b").endObject().bytes(), XContentType.JSON))) + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), XContentType.JSON))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - yamlBuilder().startObject().field("field1", "c").endObject().bytes(), XContentType.YAML))) + BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), XContentType.YAML))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - smileBuilder().startObject().field("field1", "b c").endObject().bytes(), XContentType.SMILE))) + BytesReference.bytes(smileBuilder().startObject().field("field1", "b c").endObject()), XContentType.SMILE))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", - jsonBuilder().startObject().field("field1", "d").endObject().bytes(), XContentType.JSON))) + BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), XContentType.JSON))) .add(client().prepareSearch("test") .setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null))) .add(client().prepareSearch("test") // non existing doc, so error element diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java index 15a33f2090b9f..bc23a615c583d 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java @@ -22,6 +22,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Setting; +import 
org.elasticsearch.common.bytes.BytesReference; + import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -80,7 +82,7 @@ public void testPercolateScriptQuery() throws IOException { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute().actionGet(); SearchResponse response = client().prepareSearch("index") - .setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "b").endObject().bytes(), + .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), XContentType.JSON)) .get(); assertHitCount(response, 1); @@ -109,13 +111,13 @@ public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() for (int i = 0; i < 32; i++) { SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", - XContentFactory.jsonBuilder() + BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("companyname", "stark") .startArray("employee") .startObject().field("name", "virginia potts").endObject() .startObject().field("name", "tony stark").endObject() .endArray() - .endObject().bytes(), XContentType.JSON)) + .endObject()), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) // size 0, because other wise load bitsets for normal document in FetchPhase#findRootDocumentIfNested(...) 
.setSize(0) @@ -193,7 +195,7 @@ public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() doc.endObject(); for (int i = 0; i < 32; i++) { SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", doc.bytes(), XContentType.JSON)) + .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(doc), XContentType.JSON)) .addSort("_doc", SortOrder.ASC) .get(); assertHitCount(response, 1); @@ -213,8 +215,9 @@ public void testMapUnmappedFieldAsText() throws IOException { client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "value").endObject().bytes(), - XContentType.JSON)) + .setQuery(new PercolateQueryBuilder("query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), + XContentType.JSON)) .get(); assertHitCount(response, 1); assertSearchHits(response, "1"); @@ -229,8 +232,9 @@ public void testMapUnmappedFieldAsString() throws IOException { client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch("test") - .setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "value").endObject().bytes(), - XContentType.JSON)) + .setQuery(new PercolateQueryBuilder("query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), + XContentType.JSON)) .get(); assertHitCount(response, 1); assertSearchHits(response, "1"); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 26492d3566fc4..e4fe48482377c 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ 
b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -152,7 +152,7 @@ public void testToXContent() throws IOException { RankEvalResponse response = new RankEvalResponse(0.123, Collections.singletonMap("coffee_query", coffeeQueryQuality), Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg"))); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - String xContent = response.toXContent(builder, ToXContent.EMPTY_PARAMS).bytes().utf8ToString(); + String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString(); assertEquals(("{" + " \"quality_level\": 0.123," + " \"details\": {" + diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java index 1c9c9bc6b500c..b4905425f4093 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.rankeval; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -85,7 +86,7 @@ private static RankEvalSpec createTestItem() throws IOException { builder.startObject(); builder.field("field", randomAlphaOfLengthBetween(1, 5)); builder.endObject(); - script = builder.string(); + script = Strings.toString(builder); } templates = new HashSet<>(); @@ -115,7 +116,7 @@ private static RankEvalSpec createTestItem() throws IOException { public void testXContentRoundtrip() throws IOException { RankEvalSpec testItem = createTestItem(); XContentBuilder shuffled = 
shuffleXContent(testItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, shuffled.bytes())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(shuffled))) { RankEvalSpec parsedItem = RankEvalSpec.parse(parser); // indices come from URL parameters, so they don't survive xContent roundtrip diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java index ad1385541a6b3..230828ed3fca9 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -21,8 +21,8 @@ import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -91,7 +91,7 @@ private XContentParser extractRequestSpecificFields(RestRequest restRequest, } } return parser.contentType().xContent().createParser(parser.getXContentRegistry(), - parser.getDeprecationHandler(), builder.map(body).bytes().streamInput()); + parser.getDeprecationHandler(), BytesReference.bytes(builder.map(body)).streamInput()); } } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 7400889439202..2e85d567743ee 100644 --- 
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContent; @@ -75,7 +74,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler source) throws XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); Object query = source.remove("query"); if (query == null) { - return matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS).bytes(); + return BytesReference.bytes(matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS)); } if (!(query instanceof Map)) { throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]"); } @SuppressWarnings("unchecked") Map map = (Map) query; - return builder.map(map).bytes(); + return BytesReference.bytes(builder.map(map)); } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 2ffa07cb8be56..650cf5000a745 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -37,8 +37,8 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; +import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.xcontent.DeprecationHandler; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -345,7 +345,7 @@ protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent())) { parser.nextToken(); builder.copyCurrentStructure(parser); - index.source(builder.bytes(), builder.contentType()); + index.source(BytesReference.bytes(builder), builder.contentType()); } catch (IOException e) { throw new UncheckedIOException("failed to convert hit from " + sourceXContentType + " to " + mainRequestXContentType, e); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java index ccb19fd62c814..3f6f4bcbc4fe3 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -155,7 +155,7 @@ static HttpEntity initialSearchEntity(SearchRequest searchRequest, BytesReferenc } entity.endObject(); - BytesRef bytes = entity.bytes().toBytesRef(); + BytesRef bytes = BytesReference.bytes(entity).toBytesRef(); return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, ContentType.APPLICATION_JSON); } catch (IOException e) { throw new ElasticsearchException("unexpected error building entity", e); @@ -209,9 +209,9 @@ static HttpEntity scrollEntity(String scroll, Version remoteVersion) { return new StringEntity(scroll, ContentType.TEXT_PLAIN); } try (XContentBuilder entity = JsonXContent.contentBuilder()) { - return new StringEntity(entity.startObject() + return new 
StringEntity(Strings.toString(entity.startObject() .field("scroll_id", scroll) - .endObject().string(), ContentType.APPLICATION_JSON); + .endObject()), ContentType.APPLICATION_JSON); } catch (IOException e) { throw new ElasticsearchException("failed to build scroll entity", e); } @@ -223,9 +223,9 @@ static HttpEntity clearScrollEntity(String scroll, Version remoteVersion) { return new StringEntity(scroll, ContentType.TEXT_PLAIN); } try (XContentBuilder entity = JsonXContent.contentBuilder()) { - return new StringEntity(entity.startObject() + return new StringEntity(Strings.toString(entity.startObject() .array("scroll_id", scroll) - .endObject().string(), ContentType.APPLICATION_JSON); + .endObject()), ContentType.APPLICATION_JSON); } catch (IOException e) { throw new ElasticsearchException("failed to build clear scroll entity", e); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java index d9a897026d293..d18e9c85bcdab 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.reindex.remote; import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.reindex.ScrollableHitSource.BasicHit; import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; import org.elasticsearch.index.reindex.ScrollableHitSource.Response; @@ -78,7 +79,7 @@ private RemoteResponseParsers() {} try (XContentBuilder b = XContentBuilder.builder(s.xContent())) { b.copyCurrentStructure(p); // a hack but this lets us get the right xcontent type to go with the source - return new Tuple<>(b.bytes(), s); + return new Tuple<>(BytesReference.bytes(b), s); } } catch (IOException e) { throw 
new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java index 9215459c1ce93..1c33ccdaaa289 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -118,7 +118,7 @@ public void testReindexFromRemoteRequestParsing() throws IOException { b.endObject(); } b.endObject(); - request = b.bytes(); + request = BytesReference.bytes(b); } try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) { ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest()); @@ -144,7 +144,7 @@ public void testPipelineQueryParameterIsError() throws IOException { body.endObject(); } body.endObject(); - request.withContent(body.bytes(), body.contentType()); + request.withContent(BytesReference.bytes(body), body.contentType()); } request.withParams(singletonMap("pipeline", "doesn't matter")); Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build())); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java index d1a5c15a29c22..20b18ebdaddda 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -22,6 +22,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.test.rest.ESRestTestCase; import org.hamcrest.Matcher; @@ -57,7 +58,7 @@ private void createTestDoc(final String indexName, final String typeName) throws } builder.endObject(); client().performRequest("PUT", "/" + indexName + "/" + typeName + "/" + "1", emptyMap(), - new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON)); } } @@ -108,7 +109,8 @@ public void testAliasExists() throws IOException { } builder.endObject(); - client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); + client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(Strings.toString(builder), + ContentType.APPLICATION_JSON)); headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0)); headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0)); } @@ -134,7 +136,7 @@ public void testTemplateExists() throws IOException { builder.endObject(); client().performRequest("PUT", "/_template/template", emptyMap(), - new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON)); headTestCase("/_template/template", emptyMap(), greaterThan(0)); } } @@ -162,7 +164,8 @@ public void testGetSourceAction() throws IOException { builder.endObject(); } builder.endObject(); - client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); + client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(Strings.toString(builder), + ContentType.APPLICATION_JSON)); createTestDoc("test-no-source", "test-no-source"); headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), equalTo(0)); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java 
b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java index 5576564539c5b..e185570fa0693 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java @@ -29,6 +29,8 @@ import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -64,19 +66,19 @@ public void setup() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -108,20 +110,20 @@ public void testBackCompat() throws Exception { indexService = createIndex("oldindex", Settings.builder().put("index.version.created", Version.V_5_5_0).build()); 
parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_all").field("enabled", false).endObject() .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -150,44 +152,44 @@ public void testBackCompat() throws Exception { } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + 
.nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE) .field("null_value", "1234").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); Collator collator = Collator.getInstance(ULocale.ROOT); @@ -200,20 +202,20 @@ public void testNullValue() throws IOException { } public void testEnableStore() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE) .field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); 
assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -222,20 +224,20 @@ public void testEnableStore() throws IOException { } public void testDisableIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE) .field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -245,20 +247,20 @@ public void testDisableIndex() throws IOException { } public void testDisableDocValues() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE) 
.field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -267,19 +269,19 @@ public void testDisableDocValues() throws IOException { } public void testMultipleValues() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", Arrays.asList("1234", "5678")) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", Arrays.asList("1234", "5678")) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -329,20 +331,20 @@ public void testMultipleValues() throws IOException { } public void testIndexOptions() throws IOException { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE) .field("index_options", "freqs").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -350,10 +352,10 @@ public void testIndexOptions() throws IOException { assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions()); for (String indexOptions : Arrays.asList("positions", "offsets")) { - final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE) .field("index_options", indexOptions).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping2))); assertEquals("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions, @@ -362,20 +364,20 @@ public void testIndexOptions() throws IOException { } public void testEnableNorms() throws IOException { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE) .field("norms", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -384,22 +386,22 @@ public void testEnableNorms() throws IOException { } public void testCollator() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", FIELD_TYPE) .field("language", "tr") .field("strength", "primary") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "I WÄ°LL USE TURKÄ°SH CASING") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "I 
WÄ°LL USE TURKÄ°SH CASING") + .endObject()), XContentType.JSON)); Collator collator = Collator.getInstance(new ULocale("tr")); @@ -429,19 +431,19 @@ public void testCollator() throws IOException { } public void testUpdateCollator() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", FIELD_TYPE) .field("language", "tr") .field("strength", "primary") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, randomBoolean()); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", FIELD_TYPE) .field("language", "en") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, randomBoolean())); diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index ad5f06b6ee3f1..1df613ab3f9b3 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -22,6 +22,8 @@ import org.apache.lucene.index.DocValuesType; import 
org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -71,15 +73,15 @@ protected Collection> getPlugins() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "value") - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); assertNotNull(fields); @@ -90,11 +92,11 @@ public void testDefaults() throws Exception { } public void testDocValuesSettingNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("doc_values", false) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); @@ -103,11 
+105,11 @@ public void testDocValuesSettingNotAllowed() throws Exception { } // even setting to the default is not allowed, the setting is invalid - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("doc_values", true) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); @@ -117,11 +119,11 @@ public void testDocValuesSettingNotAllowed() throws Exception { } public void testIndexSettingNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("index", "not_analyzed") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); @@ -130,11 +132,11 @@ public void testIndexSettingNotAllowed() throws Exception { } // even setting to the default is not allowed, the setting is invalid - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "murmur3") .field("index", "no") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); try { parser.parse("type", new CompressedXContent(mapping)); fail("expected a mapper parsing exception"); @@ -144,10 +146,10 @@ public void testIndexSettingNotAllowed() 
throws Exception { } public void testEmptyName() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("") .field("type", "murmur3") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index 2cde1b1bd07d2..e710a5971173e 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -21,8 +21,7 @@ import java.util.Collection; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -30,11 +29,9 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -42,7 +39,6 @@ import static 
org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.instanceOf; import org.apache.lucene.index.IndexableField; @@ -56,11 +52,11 @@ public void testSizeEnabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); DocumentMapper docMapper = service.mapperService().documentMapper("type"); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + BytesReference source = BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); boolean stored = false; @@ -77,11 +73,11 @@ public void testSizeDisabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false"); DocumentMapper docMapper = service.mapperService().documentMapper("type"); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + BytesReference source = BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); @@ -91,11 +87,11 @@ public void testSizeNotSet() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type"); DocumentMapper docMapper = service.mapperService().documentMapper("type"); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + BytesReference source = BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()); ParsedDocument doc = 
docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); @@ -106,9 +102,9 @@ public void testThatDisablingWorksWhenMerging() throws Exception { DocumentMapper docMapper = service.mapperService().documentMapper("type"); assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); - String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String disabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTests.java index f018bf7416835..9e2febb987416 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettingsFilterTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cloud.azure.storage; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; @@ -64,7 +65,7 @@ public void testSettingsFiltering() throws IOException { xContentBuilder.startObject(); settings.toXContent(xContentBuilder, request); xContentBuilder.endObject(); - String filteredSettingsString = xContentBuilder.string(); + String filteredSettingsString = Strings.toString(xContentBuilder); filteredSettings = 
Settings.builder().loadFromSource(filteredSettingsString, xContentBuilder.contentType()).build(); assertThat(filteredSettings.keySet(), contains("cloud.azure.storage.azure1.default")); } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 6610895e1f497..2330e230f4505 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -19,6 +19,7 @@ package org.elasticsearch.repositories.gcs; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -522,7 +523,7 @@ static class Response { */ private static Response newResponse(final RestStatus status, final Map headers, final XContentBuilder xContentBuilder) { try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - xContentBuilder.bytes().writeTo(out); + BytesReference.bytes(xContentBuilder).writeTo(out); return new Response(status, headers, XContentType.JSON.mediaType(), out.toByteArray()); } catch (IOException e) { return newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()); @@ -548,7 +549,7 @@ private static Response newError(final RestStatus status, final String message) .endArray() .endObject() .endObject(); - builder.bytes().writeTo(out); + BytesReference.bytes(builder).writeTo(out); } return new Response(status, emptyMap(), XContentType.JSON.mediaType(), out.toByteArray()); } catch (IOException e) { diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java 
b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 5d29243a83c97..6ea864b9084f3 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -43,6 +43,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -306,7 +307,7 @@ private static HttpEntity buildUpdateSettingsRequestBody(Map set builder.endObject(); } builder.endObject(); - requestBody = builder.string(); + requestBody = Strings.toString(builder); } return new NStringEntity(requestBody, ContentType.APPLICATION_JSON); } diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index d68acab7e2503..5e17769398c49 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -136,7 +137,7 @@ public void testSearch() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + 
index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); count = randomIntBetween(2000, 3000); byte[] randomByteArray = new byte[16]; @@ -199,7 +200,7 @@ public void testNewReplicasWork() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); int numDocs = randomIntBetween(2000, 3000); indexRandomDocuments(numDocs, true, false, i -> { @@ -278,7 +279,7 @@ public void testAliasWithBadName() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); String aliasName = "%23" + index; // %23 == # client().performRequest("PUT", "/" + index + "/_alias/" + aliasName); @@ -326,7 +327,7 @@ public void testClusterState() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); client().performRequest("PUT", "/" + index); } @@ -377,7 +378,7 @@ public void testShrink() throws IOException { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); numDocs = randomIntBetween(512, 1024); indexRandomDocuments(numDocs, 
true, true, i -> { @@ -444,7 +445,7 @@ public void testShrinkAfterUpgrade() throws IOException { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); numDocs = randomIntBetween(512, 1024); indexRandomDocuments(numDocs, true, true, i -> { @@ -834,7 +835,7 @@ public void testSnapshotRestore() throws IOException { } templateBuilder.endObject().endObject(); client().performRequest("PUT", "/_template/test_template", emptyMap(), - new StringEntity(templateBuilder.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON)); if (runningAgainstOldCluster) { // Create the repo @@ -848,7 +849,7 @@ public void testSnapshotRestore() throws IOException { } repoConfig.endObject(); client().performRequest("PUT", "/_snapshot/repo", emptyMap(), - new StringEntity(repoConfig.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON)); } client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"), @@ -873,7 +874,7 @@ public void testHistoryUUIDIsAdded() throws Exception { } mappingsAndSettings.endObject(); client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); } else { Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards")); List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." 
+ index + ".shards.0"); @@ -917,7 +918,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion restoreCommand.field("rename_replacement", "restored_" + index); restoreCommand.endObject(); client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"), - new StringEntity(restoreCommand.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON)); // Make sure search finds all documents String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0"))); @@ -995,7 +996,7 @@ private void indexRandomDocuments(int count, boolean flushAllowed, boolean saveI for (int i = 0; i < count; i++) { logger.debug("Indexing document [{}]", i); client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(), - new StringEntity(docSupplier.apply(i).string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON)); if (rarely()) { refresh(); } @@ -1020,7 +1021,7 @@ private void saveInfoDocument(String type, String value) throws IOException { // Only create the first version so we know how many documents are created when the index is first created Map params = singletonMap("op_type", "create"); client().performRequest("PUT", "/info/doc/" + index + "_" + type, params, - new StringEntity(infoDoc.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON)); } private String loadInfoDocument(String type) throws IOException { diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java index 8d4efd208d197..73aa02fd6ccb9 100644 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java +++ 
b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.seqno.SeqNoStats; @@ -241,15 +242,15 @@ public void testUpdateSnapshotStatus() throws Exception { logger.info("cluster discovered: {}", nodes.toString()); // Create the repository before taking the snapshot. - String repoConfig = JsonXContent.contentBuilder() - .startObject() - .field("type", "fs") - .startObject("settings") - .field("compress", randomBoolean()) - .field("location", System.getProperty("tests.path.repo")) - .endObject() - .endObject() - .string(); + String repoConfig = Strings + .toString(JsonXContent.contentBuilder() + .startObject() + .field("type", "fs") + .startObject("settings") + .field("compress", randomBoolean()) + .field("location", System.getProperty("tests.path.repo")) + .endObject() + .endObject()); assertOK( client().performRequest("PUT", "/_snapshot/repo", emptyMap(), diff --git a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java index 74878814f08df..29b3deb1cb5de 100644 --- a/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java +++ b/qa/query-builder-bwc/src/test/java/org/elasticsearch/bwc/QueryBuilderBWCIT.java @@ -25,6 +25,7 @@ import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -189,7 +190,7 @@ public void testQueryBuilderBWC() throws Exception { } mappingsAndSettings.endObject(); Response rsp = client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); + new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); assertEquals(200, rsp.getStatusLine().getStatusCode()); for (int i = 0; i < CANDIDATES.size(); i++) { diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index 749c03598a378..9d05ef3f05db2 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; @@ -103,12 +104,12 @@ protected Collection> nodePlugins() { @Before public void createIndices() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("location").field("type", "geo_shape").endObject() .startObject("name").field("type", "text").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings settings = Settings.builder() .put(indexSettings()) diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java 
b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java index 948f573a05c8a..a795c295d2b1c 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java @@ -23,6 +23,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.settings.Setting; @@ -210,7 +211,7 @@ private HttpEntity buildSettingsRequest(List> settings, boolean builder.endArray().endObject(); - return new StringEntity(builder.string(), ContentType.APPLICATION_JSON); + return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); } } diff --git a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java index 72d9d1b74b49d..46fafebeb4e2d 100644 --- a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java +++ b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -76,7 +77,7 @@ public void testTransportClient() throws URISyntaxException, IOException { builder.endArray(); } builder.endObject(); - body = builder.string(); + body = Strings.toString(builder); } put.setEntity(new StringEntity(body, ContentType.APPLICATION_JSON)); try (CloseableHttpResponse response = client.execute(put)) { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index a7f4ea25fdbee..952589766773f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -23,10 +23,10 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -130,7 +130,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java index a9ff7a4c67b9c..78b33021a4b5c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.Strings; 
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -72,7 +73,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java index 24fa2817b1e3b..f84ccb738df03 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java @@ -22,9 +22,9 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -77,10 +77,10 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java index 
9db3fac299edf..ad81302918eb3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequest.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -161,7 +162,7 @@ public PutRepositoryRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index f282cedff3608..38d3a9d5caf54 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -76,7 +77,7 @@ public 
ActionRequestValidationException validate() { /** * Sets the value of "flat_settings". * Used only by the high-level REST client. - * + * * @param flatSettings * value of "flat_settings" flag to be set * @return this request @@ -89,7 +90,7 @@ public ClusterUpdateSettingsRequest flatSettings(boolean flatSettings) { /** * Return settings in flat format. * Used only by the high-level REST client. - * + * * @return true if settings need to be returned in flat format; false otherwise. */ public boolean flatSettings() { @@ -136,7 +137,7 @@ public ClusterUpdateSettingsRequest transientSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - transientSettings(builder.string(), builder.contentType()); + transientSettings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -175,7 +176,7 @@ public ClusterUpdateSettingsRequest persistentSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - persistentSettings(builder.string(), builder.contentType()); + persistentSettings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 5f31533f5ed07..c71eecb30b870 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -313,7 +313,7 @@ public CreateSnapshotRequest settings(Map source) { try { XContentBuilder builder = 
XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 57fb018ae1881..7661ecde0e3ec 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -338,7 +338,7 @@ public RestoreSnapshotRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -450,7 +450,7 @@ public RestoreSnapshotRequest indexSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - indexSettings(builder.string(), builder.contentType()); + indexSettings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java index b8806daaec6ca..469106c9a6102 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -130,7 +131,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java index 10f544ce3abd0..bd3c77cdb264a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -35,8 +36,8 @@ import org.elasticsearch.index.query.QueryBuilder; import java.io.IOException; +import java.io.InputStream; import java.util.Map; -import java.util.Objects; /** * Represents an alias, to be associated with an index @@ -100,7 +101,7 @@ public Alias filter(Map filter) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(filter); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch 
(IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e); @@ -119,7 +120,7 @@ public Alias filter(QueryBuilder filterBuilder) { XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json for alias request", e); @@ -227,7 +228,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(name); if (filter != null) { - builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON); + try (InputStream stream = new BytesArray(filter).streamInput()) { + builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON); + } } if (indexRouting != null && indexRouting.equals(searchRouting)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 61be2778845ac..6332f50c1452e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.query.QueryBuilder; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -377,7 +378,7 @@ public AliasActions filter(Map filter) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(filter); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e); @@ -393,7 +394,7 @@ 
public AliasActions filter(QueryBuilder filter) { XContentBuilder builder = XContentFactory.jsonBuilder(); filter.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - this.filter = builder.string(); + this.filter = Strings.toString(builder); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json for alias request", e); @@ -432,7 +433,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.array(ALIASES.getPreferredName(), aliases); } if (false == Strings.isEmpty(filter)) { - builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON); + try (InputStream stream = new BytesArray(filter).streamInput()) { + builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON); + } } if (false == Strings.isEmpty(routing)) { builder.field(ROUTING.getPreferredName(), routing); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java index 9afdf57ebafde..d9c018848d7e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java @@ -78,7 +78,7 @@ public static class NameOrDefinition implements Writeable { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(definition); - this.definition = Settings.builder().loadFromSource(builder.string(), builder.contentType()).build(); + this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); } catch (IOException e) { throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 4cce82d7e4d31..3dec708bfae2a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; @@ -48,6 +49,7 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; +import java.io.InputStream; import java.io.UncheckedIOException; import java.util.HashMap; import java.util.HashSet; @@ -182,11 +184,7 @@ public CreateIndexRequest settings(String source, XContentType xContentType) { * Allows to set the settings using a json builder. 
*/ public CreateIndexRequest settings(XContentBuilder builder) { - try { - settings(builder.string(), builder.contentType()); - } catch (IOException e) { - throw new ElasticsearchGenerationException("Failed to generate json settings from builder", e); - } + settings(Strings.toString(builder), builder.contentType()); return this; } @@ -198,7 +196,7 @@ public CreateIndexRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), XContentType.JSON); + settings(Strings.toString(builder), XContentType.JSON); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -251,7 +249,7 @@ public CreateIndexRequest cause(String cause) { * @param source The mapping source */ public CreateIndexRequest mapping(String type, XContentBuilder source) { - return mapping(type, source.bytes(), source.contentType()); + return mapping(type, BytesReference.bytes(source), source.contentType()); } /** @@ -295,7 +293,7 @@ public CreateIndexRequest aliases(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - return aliases(builder.bytes()); + return aliases(BytesReference.bytes(builder)); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -305,7 +303,7 @@ public CreateIndexRequest aliases(Map source) { * Sets the aliases that will be associated with the index when it gets created */ public CreateIndexRequest aliases(XContentBuilder source) { - return aliases(source.bytes()); + return aliases(BytesReference.bytes(source)); } /** @@ -352,7 +350,7 @@ public CreateIndexRequest source(String source, XContentType xContentType) { * Sets the settings and mappings as a single source. 
*/ public CreateIndexRequest source(XContentBuilder source) { - return source(source.bytes(), source.contentType()); + return source(BytesReference.bytes(source), source.contentType()); } /** @@ -553,7 +551,9 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.startObject(MAPPINGS.getPreferredName()); for (Map.Entry entry : mappings.entrySet()) { - builder.rawField(entry.getKey(), new BytesArray(entry.getValue()), XContentType.JSON); + try (InputStream stream = new BytesArray(entry.getValue()).streamInput()) { + builder.rawField(entry.getKey(), stream, XContentType.JSON); + } } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 2dc27317c78e7..d837c1cbd199b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.Mapper; import java.io.IOException; +import java.io.InputStream; import java.util.HashMap; import java.util.Map; @@ -127,7 +128,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (params.paramAsBoolean("pretty", false)) { builder.field("mapping", sourceAsMap()); } else { - builder.rawField("mapping", source, XContentType.JSON); + try (InputStream stream = source.streamInput()) { + builder.rawField("mapping", stream, XContentType.JSON); + } } return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index 7d3a87c64e7c4..916a075b7c81b 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -251,11 +251,7 @@ public static XContentBuilder buildFromSimplifiedDef(String type, Object... sour * The mapping source definition. */ public PutMappingRequest source(XContentBuilder mappingBuilder) { - try { - return source(mappingBuilder.string(), mappingBuilder.contentType()); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to build json for mapping request", e); - } + return source(Strings.toString(mappingBuilder), mappingBuilder.contentType()); } /** @@ -266,7 +262,7 @@ public PutMappingRequest source(Map mappingSource) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(mappingSource); - return source(builder.string(), XContentType.JSON); + return source(Strings.toString(builder), XContentType.JSON); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + mappingSource + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index dcea5673cb51d..686bf8a74b85d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -153,7 +154,7 @@ public 
UpdateSettingsRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), builder.contentType()); + settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 24a0e10e86695..46aef007e6bab 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -213,7 +214,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 766c3323c9409..83c3f474e6616 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ 
-28,6 +28,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; @@ -196,7 +197,7 @@ public PutIndexTemplateRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); - settings(builder.string(), XContentType.JSON); + settings(Strings.toString(builder), XContentType.JSON); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -237,7 +238,7 @@ public String cause() { * @param source The mapping source */ public PutIndexTemplateRequest mapping(String type, XContentBuilder source) { - return mapping(type, source.bytes(), source.contentType()); + return mapping(type, BytesReference.bytes(source), source.contentType()); } /** @@ -295,7 +296,7 @@ public Map mappings() { */ public PutIndexTemplateRequest source(XContentBuilder templateBuilder) { try { - return source(templateBuilder.bytes(), templateBuilder.contentType()); + return source(BytesReference.bytes(templateBuilder), templateBuilder.contentType()); } catch (Exception e) { throw new IllegalArgumentException("Failed to build json for template request", e); } @@ -412,7 +413,7 @@ public PutIndexTemplateRequest aliases(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - return aliases(builder.bytes()); + return aliases(BytesReference.bytes(builder)); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } @@ -422,7 +423,7 @@ public PutIndexTemplateRequest aliases(Map source) { * Sets the aliases that will be associated with the index when it gets created 
*/ public PutIndexTemplateRequest aliases(XContentBuilder source) { - return aliases(source.bytes()); + return aliases(BytesReference.bytes(source)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index f2ddca1955878..024000ade806d 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.shard.ShardId; @@ -75,7 +74,7 @@ public class IndexRequest extends ReplicatedWriteRequest implements DocWriteRequest, CompositeIndicesRequest { /** - * Max length of the source document to include into toString() + * Max length of the source document to include into string() * * @see ReplicationRequest#createTask */ @@ -332,7 +331,7 @@ public IndexRequest source(String source, XContentType xContentType) { * Sets the content source to index. 
*/ public IndexRequest source(XContentBuilder sourceBuilder) { - return source(sourceBuilder.bytes(), sourceBuilder.contentType()); + return source(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index 978ee6c58bf42..751b13e14f8ef 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -307,7 +307,7 @@ public static byte[] writeMultiLineFormat(MultiSearchRequest multiSearchRequest, xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults()); } xContentBuilder.endObject(); - xContentBuilder.bytes().writeTo(output); + BytesReference.bytes(xContentBuilder).writeTo(output); } output.write(xContent.streamSeparator()); try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { @@ -317,7 +317,7 @@ public static byte[] writeMultiLineFormat(MultiSearchRequest multiSearchRequest, xContentBuilder.startObject(); xContentBuilder.endObject(); } - xContentBuilder.bytes().writeTo(output); + BytesReference.bytes(xContentBuilder).writeTo(output); } output.write(xContent.streamSeparator()); } diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index 62d6938c8414a..a0de2092e3811 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -256,7 +256,7 @@ public XContentType xContentType() { * Sets an artificial document from which term vectors are requested for. 
*/ public TermVectorsRequest doc(XContentBuilder documentBuilder) { - return this.doc(documentBuilder.bytes(), true, documentBuilder.contentType()); + return this.doc(BytesReference.bytes(documentBuilder), true, documentBuilder.contentType()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java index 21a77c2e0f2b3..01a9812516bf7 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java @@ -259,7 +259,8 @@ private void buildValues(XContentBuilder builder, Terms curTerms, int termFreq) builder.field(FieldStrings.END_OFFSET, currentEndOffset[i]); } if (curTerms.hasPayloads() && (currentPayloads[i].length() > 0)) { - builder.field(FieldStrings.PAYLOAD, currentPayloads[i]); + BytesRef bytesRef = currentPayloads[i].toBytesRef(); + builder.field(FieldStrings.PAYLOAD, bytesRef.bytes, bytesRef.offset, bytesRef.length); } builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index fbf005415d96d..4ee49f2407b5d 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -356,7 +356,7 @@ public static GetResult extractGetResult(final UpdateRequest request, String con BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) { builder.value(value); - sourceFilteredAsBytes = builder.bytes(); + sourceFilteredAsBytes = BytesReference.bytes(builder); } } catch (IOException e) { throw new ElasticsearchException("Error filtering source", e); diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java index c0262a6d01d0b..3293be21859bd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -244,7 +245,7 @@ public Builder filter(Map filter) { } try { XContentBuilder builder = XContentFactory.jsonBuilder().map(filter); - this.filter = new CompressedXContent(builder.bytes()); + this.filter = new CompressedXContent(BytesReference.bytes(builder)); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json for alias request", e); @@ -252,11 +253,7 @@ public Builder filter(Map filter) { } public Builder filter(XContentBuilder filterBuilder) { - try { - return filter(filterBuilder.string()); - } catch (IOException e) { - throw new ElasticsearchGenerationException("Failed to build json for alias request", e); - } + return filter(Strings.toString(filterBuilder)); } public Builder routing(String routing) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index 6945c8d1d5407..a8711b08f9d2d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.AbstractDiffable; import 
org.elasticsearch.cluster.Diff; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; @@ -463,7 +464,7 @@ public static IndexTemplateMetaData fromXContent(XContentParser parser, String t String mappingType = currentFieldName; Map mappingSource = MapBuilder.newMapBuilder().put(mappingType, parser.mapOrdered()).map(); - builder.putMapping(mappingType, XContentFactory.jsonBuilder().map(mappingSource).string()); + builder.putMapping(mappingType, Strings.toString(XContentFactory.jsonBuilder().map(mappingSource))); } } } else if ("aliases".equals(currentFieldName)) { @@ -487,7 +488,7 @@ public static IndexTemplateMetaData fromXContent(XContentParser parser, String t Map mapping = parser.mapOrdered(); if (mapping.size() == 1) { String mappingType = mapping.keySet().iterator().next(); - String mappingSource = XContentFactory.jsonBuilder().map(mapping).string(); + String mappingSource = Strings.toString(XContentFactory.jsonBuilder().map(mapping)); if (mappingSource == null) { // crap, no mapping source, warn? 
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 9cbfb2ec71f16..c5e8ed729674d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.Diff; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -99,7 +100,7 @@ public MappingMetaData(CompressedXContent mapping) throws IOException { public MappingMetaData(String type, Map mapping) throws IOException { this.type = type; XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping); - this.source = new CompressedXContent(mappingBuilder.bytes()); + this.source = new CompressedXContent(BytesReference.bytes(mappingBuilder)); Map withoutType = mapping; if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 06aa51f612bcc..9fff294daea19 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -1081,7 +1081,7 @@ public static String toXContent(MetaData metaData) throws IOException { builder.startObject(); toXContent(metaData, builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params params) throws IOException { diff 
--git a/server/src/main/java/org/elasticsearch/common/Strings.java b/server/src/main/java/org/elasticsearch/common/Strings.java index 8c823f401a0f8..6c2068197866e 100644 --- a/server/src/main/java/org/elasticsearch/common/Strings.java +++ b/server/src/main/java/org/elasticsearch/common/Strings.java @@ -755,6 +755,14 @@ public static String toString(ToXContent toXContent) { return toString(toXContent, false, false); } + /** + * Returns a string representation of the builder (only applicable for text based xcontent). + * @param xContentBuilder + */ + public static String toString(XContentBuilder xContentBuilder) { + return BytesReference.bytes(xContentBuilder).utf8ToString(); + } + /** * Return a {@link String} that is the json representation of the provided {@link ToXContent}. * Wraps the output into an anonymous object if needed. Allows to control whether the outputted @@ -771,7 +779,7 @@ public static String toString(ToXContent toXContent, boolean pretty, boolean hum if (toXContent.isFragment()) { builder.endObject(); } - return builder.string(); + return toString(builder); } catch (IOException e) { try { XContentBuilder builder = createBuilder(pretty, human); @@ -779,7 +787,7 @@ public static String toString(ToXContent toXContent, boolean pretty, boolean hum builder.field("error", "error building toString out of XContent: " + e.getMessage()); builder.field("stack_trace", ExceptionsHelper.stackTrace(e)); builder.endObject(); - return builder.string(); + return toString(builder); } catch (IOException e2) { throw new ElasticsearchException("cannot generate error message for deserialization", e); } @@ -845,5 +853,4 @@ public static String padStart(String s, int minimumLength, char c) { return sb.toString(); } } - } diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java index 92632ad7874fd..2668a375d1dfa 100644 --- 
a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java @@ -21,8 +21,11 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; +import org.elasticsearch.common.io.stream.BytesStream; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import java.io.ByteArrayOutputStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; @@ -36,6 +39,20 @@ public abstract class BytesReference implements Accountable, Comparable> additionalSettings, Li xContentBuilder.startObject(); indexSettings.toXContent(xContentBuilder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true"))); xContentBuilder.endObject(); - builder.append(xContentBuilder.string()); + builder.append(Strings.toString(xContentBuilder)); } builder.append("'"); builder.append(System.lineSeparator()); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java b/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java index 0f25231634d07..aeb4e53690a69 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java @@ -219,7 +219,7 @@ public void declareRawObject(BiConsumer consumer, ParseFi try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.prettyPrint(); builder.copyCurrentStructure(p); - return builder.bytes(); + return BytesReference.bytes(builder); } }; declareField(consumer, bytesParser, field, ValueType.OBJECT); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index 16f0ac83a849f..9e1bb362d4879 100644 --- 
a/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -20,10 +20,7 @@ package org.elasticsearch.common.xcontent; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.io.stream.BytesStream; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.ByteSizeValue; @@ -173,6 +170,13 @@ public XContentType contentType() { return generator.contentType(); } + /** + * @return the output stream to which the built object is being written. Note that is dangerous to modify the stream. + */ + public OutputStream getOutputStream() { + return bos; + } + public XContentBuilder prettyPrint() { generator.usePrettyPrint(); return this; @@ -626,24 +630,6 @@ public XContentBuilder utf8Value(BytesRef value) throws IOException { return this; } - /** - * Writes the binary content of the given {@link BytesReference}. - * - * Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back - */ - public XContentBuilder field(String name, BytesReference value) throws IOException { - return field(name).value(value); - } - - /** - * Writes the binary content of the given {@link BytesReference}. - * - * Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back - */ - public XContentBuilder value(BytesReference value) throws IOException { - return (value == null) ? 
nullValue() : binaryValue(value.toBytesRef()); - } - //////////////////////////////////////////////////////////////////////////// // Text ////////////////////////////////// @@ -810,8 +796,6 @@ private void unknownValue(Object value, boolean ensureNoSelfReferences) throws I value((Calendar) value); } else if (value instanceof ReadableInstant) { value((ReadableInstant) value); - } else if (value instanceof BytesReference) { - value((BytesReference) value); } else if (value instanceof ToXContent) { value((ToXContent) value); } else { @@ -982,28 +966,6 @@ public XContentBuilder rawField(String name, InputStream value, XContentType con return this; } - /** - * Writes a raw field with the given bytes as the value - * @deprecated use {@link #rawField(String name, BytesReference, XContentType)} to avoid content type auto-detection - */ - @Deprecated - public XContentBuilder rawField(String name, BytesReference value) throws IOException { - try (InputStream stream = value.streamInput()) { - generator.writeRawField(name, stream); - } - return this; - } - - /** - * Writes a raw field with the given bytes as the value - */ - public XContentBuilder rawField(String name, BytesReference value, XContentType contentType) throws IOException { - try (InputStream stream = value.streamInput()) { - generator.writeRawField(name, stream, contentType); - } - return this; - } - /** * Writes a value with the source coming directly from the bytes in the stream */ @@ -1035,22 +997,6 @@ public XContentGenerator generator() { return this.generator; } - public BytesReference bytes() { - close(); - if (bos instanceof ByteArrayOutputStream) { - return new BytesArray(((ByteArrayOutputStream) bos).toByteArray()); - } else { - return ((BytesStream) bos).bytes(); - } - } - - /** - * Returns a string representation of the builder (only applicable for text based xcontent). 
- */ - public String string() throws IOException { - return bytes().utf8ToString(); - } - static void ensureNameNotNull(String name) { ensureNotNull(name, "Field name cannot be null"); } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 48f3685720f9a..e392295722959 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.Compressor; @@ -174,7 +175,7 @@ public static String convertToJson(BytesReference bytes, boolean reformatJson, b builder.prettyPrint(); } builder.copyCurrentStructure(parser); - return builder.string(); + return Strings.toString(builder); } } @@ -371,7 +372,7 @@ public static void copyCurrentEvent(XContentGenerator generator, XContentParser /** * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using - * {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference)}. + * {@link XContentBuilder#rawField(String, InputStream)}. 
* @deprecated use {@link #writeRawField(String, BytesReference, XContentType, XContentBuilder, Params)} to avoid content type * auto-detection */ @@ -383,13 +384,15 @@ public static void writeRawField(String field, BytesReference source, XContentBu builder.rawField(field, compressedStreamInput); } } else { - builder.rawField(field, source); + try (InputStream stream = source.streamInput()) { + builder.rawField(field, stream); + } } } /** * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using - * {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference, XContentType)}. + * {@link XContentBuilder#rawField(String, InputStream, XContentType)}. */ public static void writeRawField(String field, BytesReference source, XContentType xContentType, XContentBuilder builder, ToXContent.Params params) throws IOException { @@ -400,7 +403,9 @@ public static void writeRawField(String field, BytesReference source, XContentTy builder.rawField(field, compressedStreamInput, xContentType); } } else { - builder.rawField(field, source, xContentType); + try (InputStream stream = source.streamInput()) { + builder.rawField(field, stream, xContentType); + } } } @@ -428,7 +433,7 @@ public static BytesReference toXContent(ToXContent toXContent, XContentType xCon if (toXContent.isFragment()) { builder.endObject(); } - return builder.bytes(); + return BytesReference.bytes(builder); } } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java index a9037b74ce9ed..a645bf81da343 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java @@ -229,7 +229,6 @@ enum NumberType { * *
    *
  • {@link XContentBuilder#field(String, org.apache.lucene.util.BytesRef)}
  • - *
  • {@link XContentBuilder#field(String, org.elasticsearch.common.bytes.BytesReference)}
  • *
  • {@link XContentBuilder#field(String, byte[], int, int)}}
  • *
  • {@link XContentBuilder#field(String, byte[])}}
  • *
diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index 4cdf2a4892690..ae59c6f507749 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -304,7 +304,7 @@ public static GetResult fromXContentEmbedded(XContentParser parser, String index //the original document gets slightly modified: whitespaces or pretty printing are not preserved, //it all depends on the current builder settings builder.copyCurrentStructure(parser); - source = builder.bytes(); + source = BytesReference.bytes(builder); } } else if (FIELDS.equals(currentFieldName)) { while(parser.nextToken() != XContentParser.Token.END_OBJECT) { diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 0aeb4f3f19d58..dcd18c8f313f9 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -227,7 +227,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[] sourceAsMap = typeMapTuple.v2(); sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes()); try { - source = XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap).bytes(); + source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap)); } catch (IOException e) { throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index e3585690d0847..a7410ea5efc89 100755 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; @@ -270,7 +271,7 @@ public void merge(Map> mappings, MergeReason reason, Map mappingSourcesCompressed = new LinkedHashMap<>(mappings.size()); for (Map.Entry> entry : mappings.entrySet()) { try { - mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string())); + mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(Strings.toString(XContentFactory.jsonBuilder().map(entry.getValue())))); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index 8a90de4d47aa5..0ca2b2ea94447 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -146,7 +147,7 @@ public String toString() { try { XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); toXContent(builder, new ToXContent.MapParams(emptyMap())); - return builder.endObject().string(); + 
return Strings.toString(builder.endObject()); } catch (IOException bogus) { throw new UncheckedIOException(bogus); } diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index de7241517eb20..391d448ae2113 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -57,6 +57,7 @@ import org.elasticsearch.index.mapper.UidFieldMapper; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -209,7 +210,7 @@ public Item(@Nullable String index, @Nullable String type, XContentBuilder doc) } this.index = index; this.type = type; - this.doc = doc.bytes(); + this.doc = BytesReference.bytes(doc); this.xContentType = doc.contentType(); } @@ -374,7 +375,7 @@ public static Item parse(XContentParser parser, Item item) throws IOException { } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { item.id = parser.text(); } else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) { - item.doc = jsonBuilder().copyCurrentStructure(parser).bytes(); + item.doc = BytesReference.bytes(jsonBuilder().copyCurrentStructure(parser)); item.xContentType = XContentType.JSON; } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { if (token == XContentParser.Token.START_ARRAY) { @@ -425,7 +426,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(ID.getPreferredName(), this.id); } if (this.doc != null) { - builder.rawField(DOC.getPreferredName(), this.doc, xContentType); + try (InputStream stream = this.doc.streamInput()) { + builder.rawField(DOC.getPreferredName(), stream, xContentType); + } } if (this.fields != null) { builder.array(FIELDS.getPreferredName(), this.fields); @@ 
-451,7 +454,7 @@ public String toString() { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.prettyPrint(); toXContent(builder, EMPTY_PARAMS); - return builder.string(); + return Strings.toString(builder); } catch (Exception e) { return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index fd55cff0ccefb..aa39d5f7417fa 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -104,7 +104,7 @@ protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Ob } builder.field(DECAY, decay); builder.endObject(); - this.functionBytes = builder.bytes(); + this.functionBytes = BytesReference.bytes(builder); } catch (IOException e) { throw new IllegalArgumentException("unable to build inner function object",e); } @@ -149,7 +149,9 @@ public BytesReference getFunctionBytes() { @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - builder.rawField(fieldName, functionBytes); + try (InputStream stream = functionBytes.streamInput()) { + builder.rawField(fieldName, stream); + } builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name()); builder.endObject(); } diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 989c52d8fd46e..2a05cc62f0091 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ 
-109,7 +109,7 @@ public DFB fromXContent(XContentParser parser) throws IOException, ParsingExcept fieldName = currentFieldName; XContentBuilder builder = XContentFactory.jsonBuilder(); builder.copyCurrentStructure(parser); - functionBytes = builder.bytes(); + functionBytes = BytesReference.bytes(builder); } else if (MULTI_VALUE_MODE.match(currentFieldName, parser.getDeprecationHandler())) { multiValueMode = MultiValueMode.fromString(parser.text()); } else { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java index 3faf5e3ec8a73..a89fdcacb2bc3 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -936,7 +937,7 @@ public synchronized String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index 3d96493746805..4dea9eb6b5f68 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ContextParser; import 
org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -50,7 +49,7 @@ public final class PipelineConfiguration extends AbstractDiffable { XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); XContentHelper.copyCurrentStructure(contentBuilder.generator(), parser); - builder.setConfig(contentBuilder.bytes(), contentBuilder.contentType()); + builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType()); }, new ParseField("config"), ObjectParser.ValueType.OBJECT); } diff --git a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index 11daaddd14720..f8575b4a0127e 100644 --- a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -54,7 +54,7 @@ public class BytesRestResponse extends RestResponse { * Creates a new response based on {@link XContentBuilder}. 
*/ public BytesRestResponse(RestStatus status, XContentBuilder builder) { - this(status, builder.contentType().mediaType(), builder.bytes()); + this(status, builder.contentType().mediaType(), BytesReference.bytes(builder)); } /** @@ -94,7 +94,7 @@ public BytesRestResponse(RestChannel channel, Exception e) throws IOException { public BytesRestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException { this.status = status; try (XContentBuilder builder = build(channel, status, e)) { - this.content = builder.bytes(); + this.content = BytesReference.bytes(builder); this.contentType = builder.contentType().mediaType(); } if (e instanceof ElasticsearchException) { diff --git a/server/src/main/java/org/elasticsearch/script/Script.java b/server/src/main/java/org/elasticsearch/script/Script.java index 7361bd2fc2a71..a64a3ecd37640 100644 --- a/server/src/main/java/org/elasticsearch/script/Script.java +++ b/server/src/main/java/org/elasticsearch/script/Script.java @@ -21,7 +21,9 @@ import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -159,7 +161,7 @@ private void setInline(XContentParser parser) { if (parser.currentToken() == Token.START_OBJECT) { //this is really for search templates, that need to be converted to json format XContentBuilder builder = XContentFactory.jsonBuilder(); - idOrCode = builder.copyCurrentStructure(parser).string(); + idOrCode = Strings.toString(builder.copyCurrentStructure(parser)); options.put(CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); } else { idOrCode = parser.text(); @@ -283,7 +285,7 @@ public static Script parse(Settings settings) { builder.startObject(); 
settings.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - try (InputStream stream = builder.bytes().streamInput(); + try (InputStream stream = BytesReference.bytes(builder).streamInput(); XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return parse(parser); @@ -639,7 +641,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) if (type == ScriptType.INLINE) { if (contentType != null && builder.contentType().mediaType().equals(contentType)) { - builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), new BytesArray(idOrCode)); + try (InputStream stream = new BytesArray(idOrCode).streamInput()) { + builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), stream); + } } else { builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptException.java b/server/src/main/java/org/elasticsearch/script/ScriptException.java index 91e6ad401fc88..726f218610833 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptException.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptException.java @@ -26,6 +26,7 @@ import java.util.Objects; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -49,11 +50,11 @@ public class ScriptException extends ElasticsearchException { private final List scriptStack; private final String script; private final String lang; - + /** * Create a new ScriptException. - * @param message A short and simple summary of what happened, such as "compile error". - * Must not be {@code null}. + * @param message A short and simple summary of what happened, such as "compile error". + * Must not be {@code null}. 
* @param cause The underlying cause of the exception. Must not be {@code null}. * @param scriptStack An implementation-specific "stacktrace" for the error in the script. * Must not be {@code null}, but can be empty (though this should be avoided if possible). @@ -85,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(script); out.writeString(lang); } - + @Override protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException { builder.field("script_stack", scriptStack); @@ -100,7 +101,7 @@ protected void metadataToXContent(XContentBuilder builder, Params params) throws public List getScriptStack() { return scriptStack; } - + /** * Returns the identifier for which script. * @return script's name or source text that identifies the script. @@ -108,7 +109,7 @@ public List getScriptStack() { public String getScript() { return script; } - + /** * Returns the language of the script. * @return the {@code lang} parameter of the scripting engine. @@ -117,7 +118,7 @@ public String getLang() { return lang; } - /** + /** * Returns a JSON version of this exception for debugging. 
*/ public String toJsonString() { @@ -126,7 +127,7 @@ public String toJsonString() { json.startObject(); toXContent(json, ToXContent.EMPTY_PARAMS); json.endObject(); - return json.string(); + return Strings.toString(json); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java index f85ac50689d47..9c52ff943d2a1 100644 --- a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -110,7 +111,7 @@ private void setSource(XContentParser parser) { if (parser.currentToken() == Token.START_OBJECT) { //this is really for search templates, that need to be converted to json format XContentBuilder builder = XContentFactory.jsonBuilder(); - source = builder.copyCurrentStructure(parser).string(); + source = Strings.toString(builder.copyCurrentStructure(parser)); options.put(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); } else { source = parser.text(); @@ -292,7 +293,7 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon builder.copyCurrentStructure(parser); } - return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, builder.string(), Collections.emptyMap()); + return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, Strings.toString(builder), Collections.emptyMap()); } } } catch (IOException ioe) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java 
index 04af2406bb8f4..96a5ebc25e2da 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -585,7 +585,7 @@ private static BytesReference parseSourceBytes(XContentParser parser) throws IOE // pretty printing are not preserved, // it all depends on the current builder settings builder.copyCurrentStructure(parser); - return builder.bytes(); + return BytesReference.bytes(builder); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java index 403bf833878bf..2da74c56f6a33 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhase.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.fetch.subphase; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHit; @@ -71,7 +72,7 @@ public void hitExecute(SearchContext context, HitContext hitContext) { builder.startObject(); builder.endObject(); } - hitContext.hit().sourceRef(builder.bytes()); + hitContext.hit().sourceRef(BytesReference.bytes(builder)); } catch (IOException e) { throw new ElasticsearchException("Error filtering source", e); } diff --git a/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java index 389b81ffcbad4..7b2cedea64abc 100644 --- a/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java @@ -26,6 +26,7 @@ import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -284,7 +285,7 @@ public String toString() { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.prettyPrint(); toXContent(builder, EMPTY_PARAMS); - return builder.string(); + return Strings.toString(builder); } catch (Exception e) { throw new ElasticsearchException("Failed to build xcontent.", e); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java index c743eb259e96f..a54f1193df008 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -767,7 +768,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return builder.string(); + return Strings.toString(builder); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 97dcd27b5d99f..4f5c3b789f892 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -44,6 +44,7 @@ import org.elasticsearch.search.suggest.completion.context.ContextMappings; import java.io.IOException; +import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -94,7 +95,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder explain json output: \n{}", explanation.toXContent(builder, ToXContent.EMPTY_PARAMS).string()); + logger.debug("--> explain json output: \n{}", Strings.toString(explanation.toXContent(builder, ToXContent.EMPTY_PARAMS))); } return explanation; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java index bce9afd1c1f57..7e61be59d9518 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.allocation.MoveDecision; import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; @@ -90,7 +91,7 @@ public void testExplanationToXContent() throws Exception { "{\"id\":\"node-0\",\"name\":\"\",\"transport_address\":\"" + cae.getCurrentNode().getAddress() + "\",\"weight_ranking\":3},\"can_remain_on_current_node\":\"yes\",\"can_rebalance_cluster\":\"yes\"," + "\"can_rebalance_to_other_node\":\"no\",\"rebalance_explanation\":\"cannot rebalance as no target node exists " + - 
"that can both allocate this shard and improve the cluster balance\"}", builder.string()); + "that can both allocate this shard and improve the cluster balance\"}", Strings.toString(builder)); } private static ClusterAllocationExplanation randomClusterAllocationExplanation(boolean assignedShard) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 2fb23b26709bd..cb6f2b57b2bd0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -38,6 +38,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -796,7 +798,7 @@ private Map serialize(ListTasksResponse response, boolean byPare } builder.endObject(); builder.flush(); - logger.info(builder.string()); - return XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + logger.info(Strings.toString(builder)); + return XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index 77b5ccc09abad..8d1a306f26094 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java 
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -209,7 +210,7 @@ private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOExcep FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withParams(params); if (hasBody) { - requestBuilder.withContent(builder.bytes(), builder.contentType()); + requestBuilder.withContent(BytesReference.bytes(builder), builder.contentType()); } return requestBuilder.build(); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java index 79b4fa6dedc1e..4ced505717a2e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ 
-123,7 +124,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } { @@ -156,7 +157,7 @@ public void testToXContent() throws IOException { " ]\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } { XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); @@ -196,7 +197,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index fdca03ebcda4e..6c9277a61bdee 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -49,8 +49,8 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws if (addRandomFields) { String unsupportedField = "unsupported_field"; - BytesReference mutated = XContentTestUtils.insertIntoXContent(xContentType.xContent(), originalBytes, - Collections.singletonList(""), () -> unsupportedField, () -> randomAlphaOfLengthBetween(3, 10)).bytes(); + BytesReference mutated = BytesReference.bytes(XContentTestUtils.insertIntoXContent(xContentType.xContent(), originalBytes, + Collections.singletonList(""), () -> unsupportedField, () -> randomAlphaOfLengthBetween(3, 10))); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> ClusterUpdateSettingsRequest.fromXContent(createParser(xContentType.xContent(), mutated))); assertThat(iae.getMessage(), diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java index 01c2457f96744..1811bfb89a62d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -153,7 +154,7 @@ public void testParseAdd() throws IOException { if (filter == null || filter.isEmpty()) { assertNull(action.filter()); } else { - assertEquals(XContentFactory.contentBuilder(XContentType.JSON).map(filter).string(), action.filter()); + assertEquals(Strings.toString(XContentFactory.contentBuilder(XContentType.JSON).map(filter)), action.filter()); } assertEquals(Objects.toString(searchRouting, null), action.searchRouting()); assertEquals(Objects.toString(indexRouting, null), action.indexRouting()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index 10a1097aaf333..80b191398ada2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -45,7 +45,7 @@ public class CreateIndexRequestTests extends ESTestCase { public void testSerialization() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = 
Strings.toString(JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject()); request.mapping("my_type", mapping, XContentType.JSON); try (BytesStreamOutput output = new BytesStreamOutput()) { @@ -63,7 +63,7 @@ public void testSerialization() throws IOException { public void testToXContent() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject()); request.mapping("my_type", mapping, XContentType.JSON); Alias alias = new Alias("test_alias"); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index 0030dc3c7aee5..e816b08187f1b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestTests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -86,7 +85,7 @@ public void testBuildFromSimplifiedDef() { public void testPutMappingRequestSerialization() throws IOException { PutMappingRequest request = new PutMappingRequest("foo"); - String mapping = YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject().string(); + String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); request.source(mapping, 
XContentType.YAML); assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.source()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index a2ef02af486a7..16afa92fb0377 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -171,7 +171,7 @@ public void testUnknownFields() throws IOException { builder.endObject(); } builder.endObject(); - BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, builder.bytes(), null, random()); + BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); expectThrows(ParsingException.class, () -> request.fromXContent(createParser(xContentType.xContent(), mutated))); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java index fbd8f8764cf73..d40199d1d103e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java @@ -30,9 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.shard.ShardStateMetaData; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.transport.NodeDisconnectedException; @@ -72,7 +70,7 @@ public void testBasicSerialization() throws Exception { contentBuilder.startObject(); storesResponse.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - BytesReference bytes = contentBuilder.bytes(); + BytesReference bytes = BytesReference.bytes(contentBuilder); try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { Map map = parser.map(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index d3c133915e7b8..f0e9a57f7f3e6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService.PutRequest; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -110,9 +111,9 @@ public void testIndexTemplateWithValidateEmptyMapping() throws Exception { public void testIndexTemplateWithValidateMapping() throws Exception { PutRequest request = new PutRequest("api", "validate_template"); request.patterns(Collections.singletonList("te*")); - request.putMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() - .endObject().endObject().endObject().string()); + 
request.putMapping("type1", Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() + .endObject().endObject().endObject())); List errors = putTemplateDetail(request); assertThat(errors.size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java index fca6ca4fd84d9..72cbe2bd9ecab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -77,7 +78,7 @@ public void testPutIndexTemplateRequest510() throws IOException { public void testPutIndexTemplateRequestSerializationXContent() throws IOException { PutIndexTemplateRequest request = new PutIndexTemplateRequest("foo"); - String mapping = YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject().string(); + String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); request.patterns(Collections.singletonList("foo")); request.mapping("bar", mapping, XContentType.YAML); assertNotEquals(mapping, request.mappings().get("bar")); @@ -106,7 +107,7 @@ public void testPutIndexTemplateRequestSerializationXContentBwc() throws IOExcep in.setVersion(version); PutIndexTemplateRequest request = new PutIndexTemplateRequest(); 
request.readFrom(in); - String mapping = YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject().string(); + String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); assertNotEquals(mapping, request.mappings().get("bar")); assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.mappings().get("bar")); assertEquals("foo", request.name()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index 4a55f0c8b95bf..20a42407720ff 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -104,7 +104,7 @@ public void testFailureToAndFromXContent() throws IOException { // Shuffle the XContent fields if (randomBoolean()) { try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - originalBytes = shuffleXContent(parser, randomBoolean()).bytes(); + originalBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index 44e0bbf823063..76a99994e04ee 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -307,8 +308,8 @@ private static MultiGetRequestBuilder 
indexDocs(Client client, BulkProcessor pro .source(Requests.INDEX_CONTENT_TYPE, "field", randomRealisticUnicodeOfLengthBetween(1, 30))); } else { final String source = "{ \"index\":{\"_index\":\"test\",\"_type\":\"test\",\"_id\":\"" + Integer.toString(i) + "\"} }\n" - + JsonXContent.contentBuilder() - .startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject().string() + "\n"; + + Strings.toString(JsonXContent.contentBuilder() + .startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject()) + "\n"; processor.add(new BytesArray(source), null, null, XContentType.JSON); } multiGetRequestBuilder.add("test", "test", Integer.toString(i)); diff --git a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java index d607a473b9add..ee4be1a5396ce 100644 --- a/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/GetResponseTests.java @@ -81,7 +81,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws parsedGetResponse = GetResponse.fromXContent(parser); assertNull(parser.nextToken()); } - assertEquals(expectedGetResponse, parsedGetResponse); + assertEquals(expectedGetResponse.getSourceAsMap(), parsedGetResponse.getSourceAsMap()); //print the parsed object out and test that the output is the same as the original output BytesReference finalBytes = toXContent(parsedGetResponse, xContentType, humanReadable); assertToXContentEquivalent(originalBytes, finalBytes, xContentType); diff --git a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index b04c7dfcd84f8..4d8e0f544c458 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.ingest; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -127,7 +128,7 @@ public void testToXContent() throws IOException { builder.startObject(); writeableIngestDocument.toXContent(builder, EMPTY_PARAMS); builder.endObject(); - Map toXContentMap = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map toXContentMap = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); Map toXContentDoc = (Map) toXContentMap.get("doc"); Map toXContentSource = (Map) toXContentDoc.get("_source"); diff --git a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java index 03e75f90ff9e3..75269098bed80 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -74,7 +75,7 @@ public void testToXContent() throws IOException { + "\"minimum_wire_compatibility_version\":\"" + version.minimumCompatibilityVersion().toString() + "\"," + "\"minimum_index_compatibility_version\":\"" + version.minimumIndexCompatibilityVersion().toString() + "\"}," + "\"tagline\":\"You Know, for Search\"" - + "}", builder.string()); + + "}", Strings.toString(builder)); } @Override diff 
--git a/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java index 6414e510069a0..9aaf10930b091 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ClearScrollRequestTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -84,7 +85,7 @@ public void testToXContent() throws IOException { clearScrollRequest.addScrollId("SCROLL_ID"); try (XContentBuilder builder = JsonXContent.contentBuilder()) { clearScrollRequest.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"scroll_id\":[\"SCROLL_ID\"]}", builder.string()); + assertEquals("{\"scroll_id\":[\"SCROLL_ID\"]}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java index c542f3b72f9f2..e96a0975fd46c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java @@ -135,7 +135,7 @@ public void testToXContent() throws IOException { "\"col\":7" + "}" + "}" + - "]}", builder.string()); + "]}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java index 6ec9f95f489de..59e9a9f486109 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -106,7 +107,7 @@ public void testToXContent() throws IOException { searchScrollRequest.scroll("1m"); try (XContentBuilder builder = JsonXContent.contentBuilder()) { searchScrollRequest.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"scroll_id\":\"SCROLL_ID\",\"scroll\":\"1m\"}", builder.string()); + assertEquals("{\"scroll_id\":\"SCROLL_ID\",\"scroll\":\"1m\"}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java b/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java index 28099506e08e6..8241628b55021 100644 --- a/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/DefaultShardOperationFailedExceptionTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -96,7 +97,7 @@ public void testFromXContent() throws IOException { .endObject(); builder = shuffleXContent(builder); DefaultShardOperationFailedException parsed; - try(XContentParser parser = createParser(xContent, builder.bytes())) { + try(XContentParser parser = createParser(xContent, BytesReference.bytes(builder))) { 
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); parsed = DefaultShardOperationFailedException.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java index fd61bbc6d71e9..9ec91f4e45091 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationResponseTests.java @@ -82,7 +82,7 @@ public void testShardInfoToAndFromXContent() throws IOException { // Shuffle the XContent fields if (randomBoolean()) { try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - originalBytes = shuffleXContent(parser, randomBoolean()).bytes(); + originalBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); } } diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 36266026504a9..ddf4f32c2c2b4 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -51,7 +51,6 @@ import org.junit.Before; import java.io.IOException; -import java.nio.file.Path; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -476,7 +475,7 @@ public void testToAndFromXContent() throws IOException { if (randomBoolean()) { try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - originalBytes = shuffleXContent(parser, randomBoolean()).bytes(); + originalBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); } } diff --git 
a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java index f44d0b7c4036e..129b347889504 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -354,7 +355,7 @@ private CompressedXContent createMapping() throws IOException { } private CompressedXContent createMapping(String fieldType) throws IOException { - final String mapping = XContentFactory.jsonBuilder() + final String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startObject("properties") @@ -363,7 +364,7 @@ private CompressedXContent createMapping(String fieldType) throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); return new CompressedXContent(mapping); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java index ef801dad28eb4..344b6dc42caed 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; 
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -73,7 +74,7 @@ public void testXContent() throws IOException { assertThat(Strings.toString(graveyard, false, true), containsString(XContentBuilder.DEFAULT_DATE_PRINTER.print(graveyard.getTombstones().get(0).getDeleteDateInMillis()))); } - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); parser.nextToken(); // the beginning of the parser assertThat(IndexGraveyard.fromXContent(parser), equalTo(graveyard)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java index 3f21bd29ff3b8..5a206407648b6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; @@ -56,7 +57,7 @@ public void testIndexMetaDataSerialization() throws IOException { builder.startObject(); metaData.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); final IndexMetaData fromXContentMeta = IndexMetaData.fromXContent(parser); assertEquals(metaData, fromXContentMeta); assertEquals(metaData.hashCode(), fromXContentMeta.hashCode()); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java 
b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java index 0d2443c42021e..d6eb00c499e9c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java @@ -113,7 +113,7 @@ public void testIndexTemplateMetaDataXContentRoundTrip() throws Exception { builder.startObject(); IndexTemplateMetaData.Builder.toXContent(indexTemplateMetaData, builder, params); builder.endObject(); - templateBytesRoundTrip = builder.bytes(); + templateBytesRoundTrip = BytesReference.bytes(builder); } final IndexTemplateMetaData indexTemplateMetaDataRoundTrip; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 2e670666c61a2..74d13a2aab046 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -169,12 +169,12 @@ public void testResolveIndexRouting() { } public void testUnknownFieldClusterMetaData() throws IOException { - BytesReference metadata = JsonXContent.contentBuilder() + BytesReference metadata = BytesReference.bytes(JsonXContent.contentBuilder() .startObject() .startObject("meta-data") .field("random", "value") .endObject() - .endObject().bytes(); + .endObject()); XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); try { MetaData.Builder.fromXContent(parser); @@ -185,12 +185,12 @@ public void testUnknownFieldClusterMetaData() throws IOException { } public void testUnknownFieldIndexMetaData() throws IOException { - BytesReference metadata = JsonXContent.contentBuilder() + BytesReference metadata = BytesReference.bytes(JsonXContent.contentBuilder() .startObject() .startObject("index_name") .field("random", "value") .endObject() - .endObject().bytes(); + .endObject()); 
XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); try { IndexMetaData.Builder.fromXContent(parser); @@ -219,7 +219,7 @@ public void testXContentWithIndexGraveyard() throws IOException { builder.startObject(); originalMeta.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); final MetaData fromXContentMeta = MetaData.fromXContent(parser); assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard())); } @@ -321,7 +321,7 @@ public void testFindMappingsWithFilters() throws IOException { Map doc = (Map)stringObjectMap.get("_doc"); try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.map(doc); - mapping = builder.string(); + mapping = Strings.toString(builder); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java index 949d4f350080c..8a62e14ba579a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java @@ -114,7 +114,7 @@ public void testSerialization() throws IOException { if (randomBoolean()) { allocationId = AllocationId.newRelocation(allocationId); } - BytesReference bytes = allocationId.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).bytes(); + BytesReference bytes = BytesReference.bytes(allocationId.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); AllocationId parsedAllocationId = AllocationId.fromXContent(createParser(JsonXContent.jsonXContent, bytes)); assertEquals(allocationId, parsedAllocationId); } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java 
b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index fc987c7e3caf3..98a7fe514543f 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -27,6 +27,7 @@ import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -292,7 +293,7 @@ public void testParseInvalidMultipoint() throws IOException { public void testParseInvalidMultiPolygon() throws IOException { // test invalid multipolygon (an "accidental" polygon with inner rings outside outer ring) - String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon") + String multiPolygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon") .startArray("coordinates") .startArray()//one poly (with two holes) .startArray() @@ -318,7 +319,7 @@ public void testParseInvalidMultiPolygon() throws IOException { .endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); parser.nextToken(); @@ -327,7 +328,7 @@ public void testParseInvalidMultiPolygon() throws IOException { public void testParseOGCPolygonWithoutHoles() throws IOException { // test 1: ccw poly not crossing dateline - String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() @@ -338,7 +339,7 @@ public void testParseOGCPolygonWithoutHoles() 
throws IOException { .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); @@ -347,7 +348,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: ccw poly crossing dateline - polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(-177.0).value(10.0).endArray() @@ -358,7 +359,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .startArray().value(-177.0).value(10.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); @@ -367,7 +368,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 3: cw poly not crossing dateline - polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() @@ -378,7 +379,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); @@ -387,7 +388,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline - polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", 
"Polygon") + polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() @@ -398,7 +399,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); @@ -409,7 +410,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { public void testParseOGCPolygonWithHoles() throws IOException { // test 1: ccw poly not crossing dateline - String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() @@ -426,7 +427,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); @@ -435,7 +436,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: ccw poly crossing dateline - polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(-177.0).value(10.0).endArray() @@ -452,7 +453,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { .startArray().value(178.0).value(8.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, 
polygonGeoJson); parser.nextToken(); @@ -461,7 +462,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 3: cw poly not crossing dateline - polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() @@ -478,7 +479,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { .startArray().value(177.0).value(8.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); @@ -487,7 +488,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline - polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(183.0).value(10.0).endArray() @@ -504,7 +505,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { .startArray().value(178.0).value(8.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); @@ -519,83 +520,83 @@ public void testParseInvalidPolygon() throws IOException { * per the GeoJSON specification */ // test case 1: create an invalid polygon with only 2 points - String invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") + String invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() 
.startArray().value(-75.022).value(41.783).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid polygon with only 1 point - invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") + invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 3: create an invalid polygon with 0 points - invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") + invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 4: create an invalid polygon with null value points - invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") + invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().nullValue().nullValue().endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, 
IllegalArgumentException.class); // test case 5: create an invalid polygon with 1 invalid LinearRing - invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") + invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .nullValue().nullValue() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); // test case 6: create an invalid polygon with 0 LinearRings - invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") + invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates").endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 7: create an invalid polygon with 0 LinearRings - invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") + invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray().value(-74.011).value(40.753).endArray() .endArray() - .endObject().string(); + .endObject()); parser = createParser(JsonXContent.jsonXContent, invalidPoly); parser.nextToken(); @@ -650,7 +651,7 @@ public void testParsePolygonWithHole() throws IOException { public void testParseSelfCrossingPolygon() throws IOException { // test self crossing ccw poly not crossing dateline - String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() 
.startArray().value(176.0).value(15.0).endArray() @@ -662,7 +663,7 @@ public void testParseSelfCrossingPolygon() throws IOException { .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 69b69a2fcf61d..e6b31d95c85d1 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -20,6 +20,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -158,7 +159,7 @@ private void testFiltering(Settings source, Settings filtered, String... 
pattern xContentBuilder.startObject(); source.toXContent(xContentBuilder, request); xContentBuilder.endObject(); - String filteredSettingsString = xContentBuilder.string(); + String filteredSettingsString = Strings.toString(xContentBuilder); filteredSettings = Settings.builder().loadFromSource(filteredSettingsString, xContentBuilder.contentType()).build(); assertThat(filteredSettings, equalTo(filtered)); } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index f467edfb8d40c..ffb5a8aee421b 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -669,20 +670,20 @@ public void testToXContent() throws IOException { builder.startObject(); test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap())); builder.endObject(); - assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string()); + assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", Strings.toString(builder)); test = Settings.builder().putList("foo.bar", "1", "2", "3").build(); builder = XContentBuilder.builder(XContentType.JSON.xContent()); builder.startObject(); test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap())); builder.endObject(); - assertEquals("{\"foo\":{\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string()); + assertEquals("{\"foo\":{\"bar\":[\"1\",\"2\",\"3\"]}}", Strings.toString(builder)); builder = 
XContentBuilder.builder(XContentType.JSON.xContent()); builder.startObject(); test.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true"))); builder.endObject(); - assertEquals("{\"foo.bar\":[\"1\",\"2\",\"3\"]}", builder.string()); + assertEquals("{\"foo.bar\":[\"1\",\"2\",\"3\"]}", Strings.toString(builder)); } public void testLoadEmptyStream() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index dbb47764158c9..e74d3b7acea97 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -85,13 +85,13 @@ public void testContentType() throws IOException { } public void testStartEndObject() throws IOException { - expectUnclosedException(() -> builder().startObject().bytes()); + expectUnclosedException(() -> BytesReference.bytes(builder().startObject())); expectUnclosedException(() -> builder().startObject().close()); - expectUnclosedException(() -> builder().startObject().string()); + expectUnclosedException(() -> Strings.toString(builder().startObject())); - expectObjectException(() -> builder().endObject().bytes()); + expectObjectException(() -> BytesReference.bytes(builder().endObject())); expectObjectException(() -> builder().endObject().close()); - expectObjectException(() -> builder().endObject().string()); + expectObjectException(() -> Strings.toString(builder().endObject())); expectValueException(() -> builder().startObject("foo").endObject()); expectNonNullFieldException(() -> builder().startObject().startObject(null)); @@ -109,13 +109,13 @@ public void testStartEndObject() throws IOException { } public void testStartEndArray() throws IOException { - expectUnclosedException(() -> builder().startArray().bytes()); + expectUnclosedException(() -> 
BytesReference.bytes(builder().startArray())); expectUnclosedException(() -> builder().startArray().close()); - expectUnclosedException(() -> builder().startArray().string()); + expectUnclosedException(() -> Strings.toString(builder().startArray())); - expectArrayException(() -> builder().endArray().bytes()); + expectArrayException(() -> BytesReference.bytes(builder().endArray())); expectArrayException(() -> builder().endArray().close()); - expectArrayException(() -> builder().endArray().string()); + expectArrayException(() -> Strings.toString(builder().endArray())); expectValueException(() -> builder().startArray("foo").endObject()); expectFieldException(() -> builder().startObject().startArray().endArray().endObject()); @@ -133,17 +133,17 @@ public void testStartEndArray() throws IOException { } public void testField() throws IOException { - expectValueException(() -> builder().field("foo").bytes()); - expectNonNullFieldException(() -> builder().field(null).bytes()); - expectUnclosedException(() -> builder().startObject().field("foo").bytes()); + expectValueException(() -> BytesReference.bytes(builder().field("foo"))); + expectNonNullFieldException(() -> BytesReference.bytes(builder().field(null))); + expectUnclosedException(() -> BytesReference.bytes(builder().startObject().field("foo"))); assertResult("{'foo':'bar'}", () -> builder().startObject().field("foo").value("bar").endObject()); } public void testNullField() throws IOException { - expectValueException(() -> builder().nullField("foo").bytes()); - expectNonNullFieldException(() -> builder().nullField(null).bytes()); - expectUnclosedException(() -> builder().startObject().nullField("foo").bytes()); + expectValueException(() -> BytesReference.bytes(builder().nullField("foo"))); + expectNonNullFieldException(() -> BytesReference.bytes(builder().nullField(null))); + expectUnclosedException(() -> BytesReference.bytes(builder().startObject().nullField("foo"))); assertResult("{'foo':null}", () -> 
builder().startObject().nullField("foo").endObject()); } @@ -272,7 +272,7 @@ public void testBinaryField() throws Exception { assertResult("{'binary':null}", () -> builder().startObject().field("binary", (byte[]) null).endObject()); final byte[] randomBytes = randomBytes(); - BytesReference bytes = builder().startObject().field("binary", randomBytes).endObject().bytes(); + BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary", randomBytes).endObject()); XContentParser parser = createParser(xcontentType().xContent(), bytes); assertSame(parser.nextToken(), Token.START_OBJECT); @@ -288,7 +288,7 @@ public void testBinaryValue() throws Exception { assertResult("{'binary':null}", () -> builder().startObject().field("binary").value((byte[]) null).endObject()); final byte[] randomBytes = randomBytes(); - BytesReference bytes = builder().startObject().field("binary").value(randomBytes).endObject().bytes(); + BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary").value(randomBytes).endObject()); XContentParser parser = createParser(xcontentType().xContent(), bytes); assertSame(parser.nextToken(), Token.START_OBJECT); @@ -315,7 +315,7 @@ public void testBinaryValueWithOffsetLength() throws Exception { } builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), builder.bytes()); + XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); assertSame(parser.nextToken(), Token.START_OBJECT); assertSame(parser.nextToken(), Token.FIELD_NAME); assertEquals(parser.currentName(), "bin"); @@ -337,7 +337,7 @@ public void testBinaryUTF8() throws Exception { } builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), builder.bytes()); + XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); assertSame(parser.nextToken(), Token.START_OBJECT); assertSame(parser.nextToken(), Token.FIELD_NAME); 
assertEquals(parser.currentName(), "utf8"); @@ -355,7 +355,7 @@ public void testText() throws Exception { final BytesReference random = new BytesArray(randomBytes()); XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject(); - XContentParser parser = createParser(xcontentType().xContent(), builder.bytes()); + XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); assertSame(parser.nextToken(), Token.START_OBJECT); assertSame(parser.nextToken(), Token.FIELD_NAME); assertEquals(parser.currentName(), "text"); @@ -1015,7 +1015,8 @@ public void testNamedObject() throws IOException { new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); XContentBuilder b = XContentBuilder.builder(xcontentType().xContent()); b.value("test"); - XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, b.bytes().streamInput()); + XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(b).streamInput()); assertEquals(test1, p.namedObject(Object.class, "test1", null)); assertEquals(test2, p.namedObject(Object.class, "test2", null)); assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); @@ -1085,7 +1086,7 @@ public static Matcher equalToJson(String json) { private static void assertResult(String expected, Builder builder) throws IOException { // Build the XContentBuilder, convert its bytes to JSON and check it matches - assertThat(XContentHelper.convertToJson(builder.build().bytes(), randomBoolean()), equalToJson(expected)); + assertThat(XContentHelper.convertToJson(BytesReference.bytes(builder.build()), randomBoolean()), equalToJson(expected)); } private static byte[] randomBytes() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java 
b/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java index 7e5bdbd017449..9f24861fdaa0e 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ObjectParserTests.NamedObject; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -78,7 +79,7 @@ public void testRandomOrder() throws Exception { XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); expected.toXContent(builder, ToXContent.EMPTY_PARAMS); builder = shuffleXContent(builder); - BytesReference bytes = builder.bytes(); + BytesReference bytes = BytesReference.bytes(builder); try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { HasCtorArguments parsed = randomFrom(HasCtorArguments.ALL_PARSERS).apply(parser, null); assertEquals(expected.animal, parsed.animal); @@ -91,7 +92,7 @@ public void testRandomOrder() throws Exception { assertEquals(expected.d, parsed.d); } catch (Exception e) { // It is convenient to decorate the error message with the json - throw new Exception("Error parsing: [" + builder.string() + "]", e); + throw new Exception("Error parsing: [" + Strings.toString(builder) + "]", e); } } @@ -428,7 +429,7 @@ private static void declareSetters(ConstructingObjectParser } public void testParseNamedObject() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": {\n" + " \"a\": {}" + "},\"named_in_constructor\": {\n" @@ -443,7 +444,7 @@ public void testParseNamedObject() throws 
IOException { } public void testParseNamedObjectInOrder() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [\n" + " {\"a\": {}}" + "],\"named_in_constructor\": [\n" @@ -458,7 +459,7 @@ public void testParseNamedObjectInOrder() throws IOException { } public void testParseNamedObjectTwoFieldsInArray() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [\n" + " {\"a\": {}, \"b\": {}}" + "],\"named_in_constructor\": [\n" @@ -472,7 +473,7 @@ public void testParseNamedObjectTwoFieldsInArray() throws IOException { } public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [\n" + " {\"a\": {}}" + "],\"named_in_constructor\": [\n" @@ -486,7 +487,7 @@ public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOExcept } public void testParseNamedObjectNoFieldsInArray() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [\n" + " {}" + "],\"named_in_constructor\": [\n" @@ -500,7 +501,7 @@ public void testParseNamedObjectNoFieldsInArray() throws IOException { } public void testParseNamedObjectNoFieldsInArrayConstructorArg() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [\n" + " {\"a\": {}}" + "],\"named_in_constructor\": [\n" @@ -514,7 +515,7 @@ public void testParseNamedObjectNoFieldsInArrayConstructorArg() throws IOExcepti } public void testParseNamedObjectJunkInArray() throws IOException { - XContentParser parser = 
createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [\n" + " \"junk\"" + "],\"named_in_constructor\": [\n" @@ -528,7 +529,7 @@ public void testParseNamedObjectJunkInArray() throws IOException { } public void testParseNamedObjectJunkInArrayConstructorArg() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [\n" + " {\"a\": {}}" + "],\"named_in_constructor\": [\n" diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 7b6f14518fecc..6f0c0208b9c75 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -358,7 +360,7 @@ public void testAllVariants() throws IOException { } builder.field("string_or_null", nullValue ? 
null : "5"); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string()); + XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder)); class TestStruct { int int_field; long long_field; @@ -533,7 +535,7 @@ public void testIgnoreUnknownFields() throws IOException { } b.endObject(); b = shuffleXContent(b); - XContentParser parser = createParser(JsonXContent.jsonXContent, b.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b)); class TestStruct { public String test; @@ -557,7 +559,7 @@ public void testIgnoreUnknownObjects() throws IOException { } b.endObject(); b = shuffleXContent(b); - XContentParser parser = createParser(JsonXContent.jsonXContent, b.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b)); class TestStruct { public String test; @@ -585,7 +587,7 @@ public void testIgnoreUnknownArrays() throws IOException { } b.endObject(); b = shuffleXContent(b); - XContentParser parser = createParser(JsonXContent.jsonXContent, b.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(b)); class TestStruct { public String test; } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java index 8a3d0ef9ccf01..65489a997ac7c 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java @@ -21,8 +21,9 @@ import com.fasterxml.jackson.dataformat.cbor.CBORConstants; import com.fasterxml.jackson.dataformat.smile.SmileConstants; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; @@ -53,12 +54,12 @@ private void testGuessType(XContentType type) throws IOException { builder.field("field1", "value1"); builder.endObject(); - assertThat(XContentFactory.xContentType(builder.bytes()), equalTo(type)); - assertThat(XContentFactory.xContentType(builder.bytes().streamInput()), equalTo(type)); + assertThat(XContentFactory.xContentType(BytesReference.bytes(builder)), equalTo(type)); + assertThat(XContentFactory.xContentType(BytesReference.bytes(builder).streamInput()), equalTo(type)); // CBOR is binary, cannot use String if (type != XContentType.CBOR && type != XContentType.SMILE) { - assertThat(XContentFactory.xContentType(builder.string()), equalTo(type)); + assertThat(XContentFactory.xContentType(Strings.toString(builder)), equalTo(type)); } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java index 8e3246d8b8a59..1f38116f2f7c7 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -21,6 +21,8 @@ import com.fasterxml.jackson.core.JsonParseException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; @@ -58,7 +60,7 @@ public void testFloat() throws IOException { builder.endObject(); final Number number; - try (XContentParser parser = createParser(xContentType.xContent(), builder.bytes())) { + try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); 
assertEquals(field, parser.currentName()); @@ -244,7 +246,7 @@ public void testEmptyList() throws IOException { .startArray("some_array") .endArray().endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); @@ -264,7 +266,7 @@ public void testSimpleList() throws IOException { .value(0) .endArray().endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); @@ -284,7 +286,7 @@ public void testNestedList() throws IOException { .startArray().value(2).endArray() .endArray().endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); @@ -305,7 +307,7 @@ public void testNestedMapInList() throws IOException { .startObject().endObject() .endArray().endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); 
assertEquals("some_array", parser.currentName()); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index d3a5e44a89efd..038d8f73c8ab2 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -20,7 +20,9 @@ package org.elasticsearch.common.xcontent.builder; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -93,61 +95,61 @@ public void testRaw() throws IOException { { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); - xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); + xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"}}")); + assertThat(Strings.toString(xContentBuilder), equalTo("{\"foo\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); - xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); - xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); + xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); + xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}").streamInput()); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().utf8ToString(), 
equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); + assertThat(Strings.toString(xContentBuilder), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.field("test", "value"); - xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); + xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); + assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.field("test", "value"); - xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); + xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); xContentBuilder.field("test1", "value1"); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}")); + assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.field("test", "value"); - xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); - xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); + xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); + xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}").streamInput()); xContentBuilder.field("test1", "value1"); 
xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}")); + assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}")); } } public void testSimpleGenerator() throws Exception { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("test", "value").endObject(); - assertThat(builder.string(), equalTo("{\"test\":\"value\"}")); + assertThat(Strings.toString(builder), equalTo("{\"test\":\"value\"}")); builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("test", "value").endObject(); - assertThat(builder.string(), equalTo("{\"test\":\"value\"}")); + assertThat(Strings.toString(builder), equalTo("{\"test\":\"value\"}")); } public void testOverloadedList() throws Exception { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("test", Arrays.asList("1", "2")).endObject(); - assertThat(builder.string(), equalTo("{\"test\":[\"1\",\"2\"]}")); + assertThat(Strings.toString(builder), equalTo("{\"test\":[\"1\",\"2\"]}")); } public void testWritingBinaryToStream() throws Exception { @@ -169,7 +171,7 @@ public void testWritingBinaryToStream() throws Exception { public void testByteConversion() throws Exception { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("test_name", (Byte)(byte)120).endObject(); - assertThat(builder.bytes().utf8ToString(), equalTo("{\"test_name\":120}")); + assertThat(BytesReference.bytes(builder).utf8ToString(), equalTo("{\"test_name\":120}")); } public void testDateTypesConversion() throws Exception { @@ -179,23 +181,23 @@ public void testDateTypesConversion() throws Exception { String expectedCalendar = 
XContentBuilder.DEFAULT_DATE_PRINTER.print(calendar.getTimeInMillis()); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("date", date).endObject(); - assertThat(builder.string(), equalTo("{\"date\":\"" + expectedDate + "\"}")); + assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}")); builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("calendar", calendar).endObject(); - assertThat(builder.string(), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}")); + assertThat(Strings.toString(builder), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}")); builder = XContentFactory.contentBuilder(XContentType.JSON); Map map = new HashMap<>(); map.put("date", date); builder.map(map); - assertThat(builder.string(), equalTo("{\"date\":\"" + expectedDate + "\"}")); + assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}")); builder = XContentFactory.contentBuilder(XContentType.JSON); map = new HashMap<>(); map.put("calendar", calendar); builder.map(map); - assertThat(builder.string(), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}")); + assertThat(Strings.toString(builder), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}")); } public void testCopyCurrentStructure() throws Exception { @@ -214,7 +216,7 @@ public void testCopyCurrentStructure() throws Exception { builder.field("fakefield", terms).endObject().endObject().endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); XContentBuilder filterBuilder = null; XContentParser.Token token; @@ -236,7 +238,7 @@ public void testCopyCurrentStructure() throws Exception { } assertNotNull(filterBuilder); - parser = createParser(JsonXContent.jsonXContent, filterBuilder.bytes()); + parser = createParser(JsonXContent.jsonXContent, 
BytesReference.bytes(filterBuilder)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); assertThat(parser.currentName(), equalTo("terms")); @@ -274,7 +276,7 @@ private void checkPathSerialization(Path path) throws IOException { XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON); stringBuilder.startObject().field("file", path.toString()).endObject(); - assertThat(pathBuilder.string(), equalTo(stringBuilder.string())); + assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder))); } public void testHandlingOfPath_StringName() throws IOException { @@ -287,7 +289,7 @@ public void testHandlingOfPath_StringName() throws IOException { XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON); stringBuilder.startObject().field(name, path.toString()).endObject(); - assertThat(pathBuilder.string(), equalTo(stringBuilder.string())); + assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder))); } public void testHandlingOfCollectionOfPaths() throws IOException { @@ -299,13 +301,13 @@ public void testHandlingOfCollectionOfPaths() throws IOException { XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON); stringBuilder.startObject().field("file", Arrays.asList(path.toString())).endObject(); - assertThat(pathBuilder.string(), equalTo(stringBuilder.string())); + assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder))); } public void testIndentIsPlatformIndependent() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); builder.startObject().field("test","foo").startObject("foo").field("foobar", "boom").endObject().endObject(); - String string = builder.string(); + String string = Strings.toString(builder); assertEquals("{\n" + " \"test\" : \"foo\",\n" + " \"foo\" : {\n" 
+ @@ -315,7 +317,7 @@ public void testIndentIsPlatformIndependent() throws IOException { builder = XContentFactory.contentBuilder(XContentType.YAML).prettyPrint(); builder.startObject().field("test","foo").startObject("foo").field("foobar", "boom").endObject().endObject(); - string = builder.string(); + string = Strings.toString(builder); assertEquals("---\n" + "test: \"foo\"\n" + "foo:\n" + @@ -325,7 +327,7 @@ public void testIndentIsPlatformIndependent() throws IOException { public void testRenderGeoPoint() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); builder.startObject().field("foo").value(new GeoPoint(1,2)).endObject(); - String string = builder.string(); + String string = Strings.toString(builder); assertEquals("{\n" + " \"foo\" : {\n" + " \"lat\" : 1.0,\n" + diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java index b95ec03f94c14..146b83c8c17a9 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java @@ -28,7 +28,7 @@ public class CborXContentParserTests extends ESTestCase { public void testEmptyValue() throws IOException { - BytesReference ref = XContentFactory.cborBuilder().startObject().field("field", "").endObject().bytes(); + BytesReference ref = BytesReference.bytes(XContentFactory.cborBuilder().startObject().field("field", "").endObject()); for (int i = 0; i < 2; i++) { // Running this part twice triggers the issue. 
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java index 9d95ea6013f3a..c12376bd5516c 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java @@ -1085,12 +1085,12 @@ public void testRawField() throws Exception { .endObject(); Builder sampleWithRaw = builder -> { - BytesReference raw = XContentBuilder.builder(builder.contentType().xContent()) - .startObject() - .field("content", "hello world!") - .endObject() - .bytes(); - return builder.startObject().field("foo", 0).rawField("raw", raw).endObject(); + BytesReference raw = BytesReference + .bytes(XContentBuilder.builder(builder.contentType().xContent()) + .startObject() + .field("content", "hello world!") + .endObject()); + return builder.startObject().field("foo", 0).rawField("raw", raw.streamInput()).endObject(); }; // Test method: rawField(String fieldName, BytesReference content) @@ -1101,11 +1101,11 @@ public void testRawField() throws Exception { testFilter(expectedRawFieldNotFiltered, sampleWithRaw, emptySet(), singleton("f*")); sampleWithRaw = builder -> { - BytesReference raw = XContentBuilder.builder(builder.contentType().xContent()) - .startObject() - . field("content", "hello world!") - .endObject() - .bytes(); + BytesReference raw = BytesReference + .bytes(XContentBuilder.builder(builder.contentType().xContent()) + .startObject() + . 
field("content", "hello world!") + .endObject()); return builder.startObject().field("foo", 0).rawField("raw", raw.streamInput()).endObject(); }; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java index ce092e6f2123d..4acb497c46bd9 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.xcontent.support; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -77,7 +78,7 @@ public void testExtractValue() throws Exception { .endObject(); Map map; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("test", map).toString(), equalTo("value")); @@ -88,7 +89,7 @@ public void testExtractValue() throws Exception { .startObject("path1").startObject("path2").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("path1.path2.test", map).toString(), equalTo("value")); @@ -110,7 +111,7 @@ public void testExtractValue() throws Exception { .startObject("path1").array("test", "value1", "value2").endObject() .endObject(); - try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } @@ -129,7 +130,7 @@ public void testExtractValue() throws Exception { .endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } @@ -145,7 +146,7 @@ public void testExtractValue() throws Exception { builder = XContentFactory.jsonBuilder().startObject() .field("xxx.yyy", "value") .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("xxx.yyy", map).toString(), equalTo("value")); @@ -154,7 +155,7 @@ public void testExtractValue() throws Exception { .startObject("path1.xxx").startObject("path2.yyy").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractValue("path1.xxx.path2.yyy.test", map).toString(), equalTo("value")); @@ -166,7 +167,7 @@ public void testExtractRawValue() throws Exception { .endObject(); Map map; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("test", map).get(0).toString(), equalTo("value")); @@ -175,7 +176,7 @@ public void testExtractRawValue() throws Exception { .field("test.me", "value") .endObject(); - try 
(XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("test.me", map).get(0).toString(), equalTo("value")); @@ -184,7 +185,7 @@ public void testExtractRawValue() throws Exception { .startObject("path1").startObject("path2").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("path1.path2.test", map).get(0).toString(), equalTo("value")); @@ -193,7 +194,7 @@ public void testExtractRawValue() throws Exception { .startObject("path1.xxx").startObject("path2.yyy").field("test", "value").endObject().endObject() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.string())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("path1.xxx.path2.yyy.test", map).get(0).toString(), equalTo("value")); @@ -352,7 +353,7 @@ public void testThatFilterIncludesEmptyObjectWhenUsingIncludes() throws Exceptio .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"obj"}, Strings.EMPTY_ARRAY); assertThat(mapTuple.v2(), equalTo(filteredSource)); @@ -364,7 +365,7 @@ public void testThatFilterIncludesEmptyObjectWhenUsingExcludes() throws Exceptio .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + 
Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"nonExistingField"}); assertThat(mapTuple.v2(), equalTo(filteredSource)); @@ -377,7 +378,7 @@ public void testNotOmittingObjectsWithExcludedProperties() throws Exception { .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"obj.f1"}); assertThat(filteredSource.size(), equalTo(1)); @@ -397,7 +398,7 @@ public void testNotOmittingObjectWithNestedExcludedObject() throws Exception { .endObject(); // implicit include - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"*.obj2"}); assertThat(filteredSource.size(), equalTo(1)); @@ -427,7 +428,7 @@ public void testIncludingObjectWithNestedIncludedObject() throws Exception { .endObject() .endObject(); - Tuple> mapTuple = convertToMap(builder.bytes(), true, builder.contentType()); + Tuple> mapTuple = convertToMap(BytesReference.bytes(builder), true, builder.contentType()); Map filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"*.obj2"}, Strings.EMPTY_ARRAY); assertThat(filteredSource.size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java index 3f6ec53f4f69f..1d12defe6988d 100644 --- 
a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.xcontent.support.filtering; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; @@ -69,7 +71,7 @@ public void testSingleFieldObject() throws IOException { } static void assertXContentBuilderAsString(final XContentBuilder expected, final XContentBuilder actual) { - assertThat(actual.bytes().utf8ToString(), is(expected.bytes().utf8ToString())); + assertThat(Strings.toString(actual), is(Strings.toString(expected))); } static void assertXContentBuilderAsBytes(final XContentBuilder expected, final XContentBuilder actual) { @@ -77,10 +79,10 @@ static void assertXContentBuilderAsBytes(final XContentBuilder expected, final X XContent xContent = XContentFactory.xContent(actual.contentType()); XContentParser jsonParser = xContent.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, expected.bytes().streamInput()); + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(expected).streamInput()); XContentParser testParser = xContent.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, actual.bytes().streamInput()); + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(actual).streamInput()); while (true) { XContentParser.Token token1 = jsonParser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 31cfa30a49eb0..e51177c318ca8 100644 --- 
a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -286,6 +287,6 @@ public void testDiscoveryStats() throws Exception { stats.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertThat(builder.string(), equalTo(expectedStatsJsonResponse)); + assertThat(Strings.toString(builder), equalTo(expectedStatsJsonResponse)); } } diff --git a/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java index 88bab97a1f3ff..cfcb48f4a4899 100644 --- a/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/test/java/org/elasticsearch/document/DocumentActionsIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -97,10 +98,10 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { getResult = client().prepareGet("test", "type1", "1").execute().actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), 
equalTo(Strings.toString(source("1", "test")))); assertThat("cycle(map) #" + i, (String) getResult.getSourceAsMap().get("name"), equalTo("test")); getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } @@ -149,10 +150,10 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { getResult = client().get(getRequest("test").type("type1").id("1")).actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); String ste1 = getResult.getSourceAsString(); - String ste2 = source("2", "test2").string(); + String ste2 = Strings.toString(source("2", "test2")); assertThat("cycle #" + i, ste1, equalTo(ste2)); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } @@ -236,11 +237,11 @@ public void testBulk() throws Exception { assertThat("cycle #" + i, getResult.isExists(), equalTo(false)); getResult = client().get(getRequest("test").type("type1").id("2")).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("2", "test").string())); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("2", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); getResult = client().get(getRequest("test").type("type1").id(generatedId3)).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("3", "test").string())); + assertThat("cycle #" + 
i, getResult.getSourceAsString(), equalTo(Strings.toString(source("3", "test")))); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } } diff --git a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 23254e81060a0..154d702e7fb77 100644 --- a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -85,9 +86,9 @@ public void testOneNodeRecoverFromGateway() throws Exception { internalCluster().startNode(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("appAccountIds").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); client().prepareIndex("test", "type1", "10990239").setSource(jsonBuilder().startObject() @@ -154,9 +155,9 @@ private Map assertAndCapturePrimaryTerms(Map pre public void testSingleNodeNoFlush() throws Exception { internalCluster().startNode(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("field").field("type", 
"text").endObject().startObject("num").field("type", "integer").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test. int numberOfShards = numberOfShards(); assertAcked(prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards()) diff --git a/server/src/test/java/org/elasticsearch/get/GetActionIT.java b/server/src/test/java/org/elasticsearch/get/GetActionIT.java index a327ebefddc1c..efae2ada5b010 100644 --- a/server/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -249,11 +249,11 @@ public void testSimpleMultiGet() throws Exception { } public void testGetDocWithMultivaluedFields() throws Exception { - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("test") .addMapping("type1", mapping1, XContentType.JSON)); ensureGreen(); @@ -289,16 +289,16 @@ public void testGetDocWithMultivaluedFields() throws Exception { public void testGetDocWithMultivaluedFieldsMultiTypeBWC() throws Exception { assertTrue("remove this multi type test", Version.CURRENT.before(Version.fromString("7.0.0"))); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() - .endObject().endObject().string(); - String mapping2 = 
XContentFactory.jsonBuilder().startObject().startObject("type2") + .endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("test") .addMapping("type1", mapping1, XContentType.JSON) .addMapping("type2", mapping2, XContentType.JSON) @@ -692,7 +692,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject().endObject().endObject() .endObject().endObject().endObject())); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .startArray("field1") .startObject() .startObject("field2") @@ -713,7 +713,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); logger.info("indexing documents"); diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index df6aa962f317a..74d2a0961030e 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TopDocs; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -60,7 +61,7 @@ public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOExc XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - 
return new CompressedXContent(builder.string()); + return new CompressedXContent(Strings.toString(builder)); } public void testBaseAsyncTask() throws InterruptedException, IOException { diff --git a/server/src/test/java/org/elasticsearch/index/IndexTests.java b/server/src/test/java/org/elasticsearch/index/IndexTests.java index c39a43e849060..fda181614ffa1 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -55,7 +56,7 @@ public void testXContent() throws IOException { final Index original = new Index(name, uuid); final XContentBuilder builder = JsonXContent.contentBuilder(); original.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); parser.nextToken(); // the beginning of the parser assertThat(Index.fromXContent(parser), equalTo(original)); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index ff5166e8f1a14..49e6c6597e180 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -45,7 +45,7 @@ public class IndexingSlowLogTests extends ESTestCase { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { - BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes(); + BytesReference source = 
BytesReference.bytes(JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject()); ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", "test", null, null, source, XContentType.JSON, null); Index index = new Index("foo", "123"); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index b20972adeda08..d0ffdbe229dd6 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -80,9 +81,9 @@ public void testThatAnalyzersAreUsedInMapping() throws IOException { NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX, randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", analyzerName).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); diff --git 
a/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index 64dcf0a0943b9..3d811832d2951 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -21,6 +21,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -39,14 +41,14 @@ protected boolean hasDocValues() { } public void testDocValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("test") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("test") .startObject("properties") .startObject("field") .field("type", "binary") .field("doc_values", true) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); final DocumentMapper mapper = mapperService.documentMapperParser().parse("test", new CompressedXContent(mapping)); @@ -62,16 +64,16 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", doc.bytes(), XContentType.JSON)); + ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); BytesRef bytes1 = randomBytes(); doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1).endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "2", doc.bytes(), XContentType.JSON)); + d = mapper.parse(SourceToParse.source("test", 
"test", "2", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); doc = XContentFactory.jsonBuilder().startObject().endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "3", doc.bytes(), XContentType.JSON)); + d = mapper.parse(SourceToParse.source("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); // test remove duplicate value @@ -87,7 +89,7 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "4", doc.bytes(), XContentType.JSON)); + d = mapper.parse(SourceToParse.source("test", "test", "4", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); IndexFieldData indexFieldData = getForField("field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java index 4ccc8bc215fd7..2d445b761d12d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -32,7 +33,7 @@ public void testUpdateDefaultSearchAnalyzer() throws Exception { IndexService indexService = createIndex("test", Settings.builder() .put("index.analysis.analyzer.default_search.type", "custom") .put("index.analysis.analyzer.default_search.tokenizer", "standard").build()); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()); 
indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, indexService.mapperService().documentMapper("_doc").mapping().toString()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index fac6e4c84b18c..e4cd5731daafa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -20,27 +20,23 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.Arrays; import java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -53,13 +49,13 @@ protected Collection> getPlugins() { } public void testDefaultMapping() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -69,14 +65,14 @@ public void testDefaultMapping() throws Exception { } public void testStoredValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .field("store", true) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -93,8 +89,8 @@ public void testStoredValue() throws IOException { assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "id", - XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "id", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON)); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); assertEquals(new BytesRef(value), indexedValue); @@ -106,9 +102,9 @@ public void testStoredValue() throws IOException { public void testEmptyName() throws IOException { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + 
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "binary").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index e6a1c0a69d81a..05ddcc995639b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -75,17 +76,17 @@ protected Collection> getPlugins() { } public void testDefaults() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", true) - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", 
BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", true) + .endObject()), XContentType.JSON)); try (Directory dir = new RAMDirectory(); @@ -105,36 +106,36 @@ public void testDefaults() throws IOException { } public void testSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "boolean").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); FieldMapper mapper = defaultMapper.mappers().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"field\":{\"type\":\"boolean\"}}", builder.string()); + assertEquals("{\"field\":{\"type\":\"boolean\"}}", Strings.toString(builder)); // now change some parameters - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "boolean") .field("doc_values", "false") .field("null_value", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = parser.parse("type", new CompressedXContent(mapping)); mapper = defaultMapper.mappers().getMapper("field"); builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", builder.string()); + assertEquals("{\"field\":{\"type\":\"boolean\",\"doc_values\":false,\"null_value\":true}}", 
Strings.toString(builder)); } public void testParsesPreEs6BooleansLenient() throws IOException { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startObject("properties") @@ -146,18 +147,18 @@ public void testParsesPreEs6BooleansLenient() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); DocumentMapper defaultMapper = preEs6Parser.parse("type", new CompressedXContent(mapping)); String falsy = randomFrom("false", "off", "no", "0"); String truthy = randomFrom("true", "on", "yes", "1"); - ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("legacy", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", falsy) - .field("field2", truthy) - .endObject() - .bytes(), + ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("legacy", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", falsy) + .field("field2", truthy) + .endObject()), XContentType.JSON)); Document doc = parsedDoc.rootDoc(); assertEquals("F", doc.getField("field1").stringValue()); @@ -177,7 +178,7 @@ public void testParsesPreEs6BooleansLenient() throws IOException { } public void testParsesEs6BooleansStrict() throws IOException { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startObject("properties") @@ -186,20 +187,20 @@ public void testParsesEs6BooleansStrict() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() + BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() // omit "false"/"true" here as they should still be 
parsed correctly .field("field", randomFrom("off", "no", "0", "on", "yes", "1")) - .endObject().bytes(); - MapperParsingException ex = expectThrows(MapperParsingException.class, + .endObject()); + MapperParsingException ex = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON))); assertEquals("failed to parse [field]", ex.getMessage()); } public void testMultiFields() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "boolean") @@ -209,20 +210,20 @@ public void testMultiFields() throws IOException { .endObject() .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = indexService.mapperService() .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); - BytesReference source = XContentFactory.jsonBuilder() + BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field", false) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); assertNotNull(doc.rootDoc().getField("field.as_string")); } public void testDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("bool1") .field("type", "boolean") @@ -236,17 +237,17 @@ public void testDocValues() throws Exception { .field("index", true) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); 
DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("bool1", true) - .field("bool2", true) - .field("bool3", true) - .endObject() - .bytes(), + ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("bool1", true) + .field("bool2", true) + .field("bool3", true) + .endObject()), XContentType.JSON)); Document doc = parsedDoc.rootDoc(); IndexableField[] fields = doc.getFields("bool1"); @@ -263,9 +264,9 @@ public void testDocValues() throws Exception { public void testEmptyName() throws IOException { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "boolean").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java index ac14f2905cf3b..09394b396679f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java @@ -19,26 +19,27 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; public class CamelCaseFieldNameTests extends ESSingleNodeTestCase { public void testCamelCaseFieldNameStaysAsIs() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper documentMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() - .field("thisIsCamelCase", "value1") - .endObject().bytes(), - XContentType.JSON)); + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject() + .field("thisIsCamelCase", "value1") + .endObject()), + XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 74183ae864a60..be03a28a0aad2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -29,6 +29,8 @@ import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.common.Strings; +import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; @@ -51,11 +53,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { public void testDefaultConfiguration() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); @@ -80,7 +82,7 @@ public void testDefaultConfiguration() throws IOException { } public void testCompletionAnalyzerSettings() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .field("analyzer", "simple") @@ -88,7 +90,7 @@ public void testCompletionAnalyzerSettings() throws Exception { .field("preserve_separators", false) .field("preserve_position_increments", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); @@ -114,7 +116,7 @@ public void testCompletionAnalyzerSettings() throws Exception { } public void testTypeParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") 
.field("analyzer", "simple") @@ -123,7 +125,7 @@ public void testTypeParsing() throws Exception { .field("preserve_position_increments", true) .field("max_input_length", 14) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); @@ -134,7 +136,7 @@ public void testTypeParsing() throws Exception { XContentBuilder builder = jsonBuilder().startObject(); completionFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); builder.close(); - Map serializedMap = createParser(JsonXContent.jsonXContent, builder.bytes()).map(); + Map serializedMap = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)).map(); Map configMap = (Map) serializedMap.get("completion"); assertThat(configMap.get("analyzer").toString(), is("simple")); assertThat(configMap.get("search_analyzer").toString(), is("standard")); @@ -144,196 +146,196 @@ public void testTypeParsing() throws Exception { } public void testParsingMinimal() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", "suggestion") - .endObject() - .bytes(), + ParsedDocument parsedDocument = 
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", "suggestion") + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 1); } public void testParsingFailure() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); MapperParsingException e = expectThrows(MapperParsingException.class, () -> - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", 1.0) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", 1.0) + .endObject()), XContentType.JSON))); assertEquals("failed to parse [completion]: expected text or object, but got VALUE_NUMBER", e.getCause().getMessage()); } public void testParsingMultiValued() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = 
defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .array("completion", "suggestion1", "suggestion2") - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("completion", "suggestion1", "suggestion2") + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 2); } public void testParsingWithWeight() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("completion") - .field("input", "suggestion") - .field("weight", 2) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("completion") + .field("input", "suggestion") + .field("weight", 2) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = 
parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 1); } public void testParsingMultiValueWithWeight() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("completion") - .array("input", "suggestion1", "suggestion2", "suggestion3") - .field("weight", 2) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("completion") + .array("input", "suggestion1", "suggestion2", "suggestion3") + .field("weight", 2) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 3); } public void testParsingFull() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .field("input", "suggestion1") - .field("weight", 3) - .endObject() - .startObject() - .field("input", "suggestion2") - .field("weight", 4) - .endObject() - .startObject() - .field("input", "suggestion3") - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .field("input", "suggestion1") + .field("weight", 3) + .endObject() + .startObject() + .field("input", "suggestion2") + .field("weight", 4) + .endObject() + .startObject() + .field("input", "suggestion3") + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 3); } public void testParsingMixed() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = 
fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion1", "suggestion2") - .field("weight", 3) - .endObject() - .startObject() - .field("input", "suggestion3") - .field("weight", 4) - .endObject() - .startObject() - .array("input", "suggestion4", "suggestion5", "suggestion6") - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion1", "suggestion2") + .field("weight", 3) + .endObject() + .startObject() + .field("input", "suggestion3") + .field("weight", 4) + .endObject() + .startObject() + .array("input", "suggestion4", "suggestion5", "suggestion6") + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertSuggestFields(fields, 6); } public void testNonContextEnabledParsingWithContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("field1") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("field1") - .field("input", "suggestion1") - .startObject("contexts") - .field("ctx", "ctx2") - .endObject() 
- .field("weight", 3) - .endObject() - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("field1") + .field("input", "suggestion1") + .startObject("contexts") + .field("ctx", "ctx2") + .endObject() + .field("weight", 3) + .endObject() + .endObject()), XContentType.JSON)); fail("Supplying contexts to a non context-enabled field should error"); } catch (MapperParsingException e) { @@ -342,22 +344,22 @@ public void testNonContextEnabledParsingWithContexts() throws Exception { } public void testFieldValueValidation() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); CharsRefBuilder charsRefBuilder = new CharsRefBuilder(); charsRefBuilder.append("sugg"); charsRefBuilder.setCharAt(2, '\u001F'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", charsRefBuilder.get().toString()) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", charsRefBuilder.get().toString()) + .endObject()), XContentType.JSON)); fail("No error indexing value with reserved character [0x1F]"); } catch (MapperParsingException e) { @@ -368,11 +370,11 @@ public void testFieldValueValidation() throws Exception { charsRefBuilder.setCharAt(2, '\u0000'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", 
XContentFactory.jsonBuilder() - .startObject() - .field("completion", charsRefBuilder.get().toString()) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", charsRefBuilder.get().toString()) + .endObject()), XContentType.JSON)); fail("No error indexing value with reserved character [0x0]"); } catch (MapperParsingException e) { @@ -383,11 +385,11 @@ public void testFieldValueValidation() throws Exception { charsRefBuilder.setCharAt(2, '\u001E'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", XContentFactory.jsonBuilder() - .startObject() - .field("completion", charsRefBuilder.get().toString()) - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("completion", charsRefBuilder.get().toString()) + .endObject()), XContentType.JSON)); fail("No error indexing value with reserved character [0x1E]"); } catch (MapperParsingException e) { @@ -398,11 +400,11 @@ public void testFieldValueValidation() throws Exception { } public void testPrefixQueryType() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -412,11 +414,11 @@ public void testPrefixQueryType() throws Exception { } public void testFuzzyQueryType() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String 
mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -429,11 +431,11 @@ public void testFuzzyQueryType() throws Exception { } public void testRegexQueryType() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -456,9 +458,9 @@ private static void assertSuggestFields(IndexableField[] fields, int expected) { public void testEmptyName() throws IOException { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "completion").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java 
b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index 637a25b24d60a..0015919b674ae 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -69,12 +70,12 @@ public void testDynamicTemplateCopyTo() throws Exception { } public void testDynamicObjectCopyTo() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc").startObject("properties") .startObject("foo") .field("type", "text") .field("copy_to", "root.top.child") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); assertAcked( client().admin().indices().prepareCreate("test-idx") .addMapping("_doc", mapping, XContentType.JSON) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java index a0b6a1458e24f..b68b9c6f7dcae 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; @@ -46,7 +47,7 @@ public class 
CopyToMapperTests extends ESSingleNodeTestCase { @SuppressWarnings("unchecked") public void testCopyToFieldsParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") .array("copy_to", "another_field", "cyclic_test") @@ -66,7 +67,7 @@ public void testCopyToFieldsParsing() throws Exception { .field("doc_values", false) .array("copy_to", "another_field", "new_field") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type1").setSource(mapping, XContentType.JSON).get(); @@ -79,7 +80,7 @@ public void testCopyToFieldsParsing() throws Exception { stringFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); builder.close(); Map serializedMap; - try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes())) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { serializedMap = parser.map(); } Map copyTestMap = (Map) serializedMap.get("copy_test"); @@ -90,11 +91,11 @@ public void testCopyToFieldsParsing() throws Exception { assertThat(copyToList.get(1), equalTo("cyclic_test")); // Check data parsing - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("cyclic_test", "bar") .field("int_to_str_test", 42) - .endObject().bytes(); + .endObject()); ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)); ParseContext.Document doc = parsedDoc.rootDoc(); @@ -126,7 +127,7 @@ public void testCopyToFieldsParsing() throws Exception { } public void 
testCopyToFieldsInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") @@ -142,16 +143,16 @@ public void testCopyToFieldsInnerObjectParsing() throws Exception { .endObject() .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .startObject("foo").startObject("bar").field("baz", "zoo").endObject().endObject() - .endObject().bytes(); + .endObject()); - ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -162,21 +163,21 @@ public void testCopyToFieldsInnerObjectParsing() throws Exception { } public void testCopyToDynamicInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("copy_test") .field("type", "text") .field("copy_to", "very.inner.field") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + 
BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("new_field", "bar") - .endObject().bytes(); + .endObject()); ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -191,7 +192,7 @@ public void testCopyToDynamicInnerObjectParsing() throws Exception { } public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("copy_test") .field("type", "text") @@ -206,16 +207,16 @@ public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("new_field", "bar") - .endObject().bytes(); + .endObject()); - ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -228,7 +229,7 @@ public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { } public void testCopyToStrictDynamicInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .field("dynamic", "strict") .startObject("properties") .startObject("copy_test") 
@@ -236,13 +237,13 @@ public void testCopyToStrictDynamicInnerObjectParsing() throws Exception { .field("copy_to", "very.inner.field") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") - .endObject().bytes(); + .endObject()); try { docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -253,7 +254,7 @@ public void testCopyToStrictDynamicInnerObjectParsing() throws Exception { } public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("copy_test") .field("type", "text") @@ -270,13 +271,13 @@ public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") - .endObject().bytes(); + .endObject()); try { docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -287,23 +288,23 @@ public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { } public void testCopyToFieldMerge() throws Exception { - String mappingBefore = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mappingBefore = 
Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") .array("copy_to", "foo", "bar") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); - String mappingAfter = jsonBuilder().startObject().startObject("type1").startObject("properties") + String mappingAfter = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("copy_test") .field("type", "text") .array("copy_to", "baz", "bar") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), MapperService.MergeReason.MAPPING_UPDATE, false); @@ -357,7 +358,7 @@ public void testCopyToNestedField() throws Exception { .endObject() .endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); XContentBuilder jsonDoc = XContentFactory.jsonBuilder() .startObject() @@ -382,7 +383,7 @@ public void testCopyToNestedField() throws Exception { .endArray() .endObject(); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", jsonDoc.bytes(), XContentType.JSON)); + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc), XContentType.JSON)); assertEquals(6, doc.docs().size()); Document nested = doc.docs().get(0); @@ -437,7 +438,7 @@ public void testCopyToChildNested() throws Exception { .endObject() .endObject(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(rootToNestedMapping.bytes()), + () -> 
indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)), MergeReason.MAPPING_UPDATE, false)); assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings")); @@ -465,7 +466,7 @@ public void testCopyToChildNested() throws Exception { .endObject() .endObject(); e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(nestedToNestedMapping.bytes()), + () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(nestedToNestedMapping)), MergeReason.MAPPING_UPDATE, false)); } @@ -495,7 +496,7 @@ public void testCopyToSiblingNested() throws Exception { .endObject() .endObject(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(rootToNestedMapping.bytes()), + () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)), MergeReason.MAPPING_UPDATE, false)); assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings")); } @@ -516,13 +517,13 @@ public void testCopyToObject() throws Exception { .endObject() .endObject(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(rootToNestedMapping.bytes()), + () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)), MergeReason.MAPPING_UPDATE, false)); assertThat(e.getMessage(), Matchers.startsWith("Cannot copy to field [target] since it is mapped as an object")); } public void testCopyToDynamicNestedObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") 
.startArray("dynamic_templates") .startObject() .startObject("objects") @@ -539,14 +540,14 @@ public void testCopyToDynamicNestedObjectParsing() throws Exception { .field("copy_to", "very.inner.field") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - BytesReference json = jsonBuilder().startObject() + BytesReference json = BytesReference.bytes(jsonBuilder().startObject() .field("copy_test", "foo") .field("new_field", "bar") - .endObject().bytes(); + .endObject()); try { docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); @@ -569,7 +570,7 @@ private void assertFieldValue(Document doc, String field, Number... expected) { } public void testCopyToMultiField() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("my_field") .field("type", "keyword") @@ -581,7 +582,7 @@ public void testCopyToMultiField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -590,7 +591,7 @@ public void testCopyToMultiField() throws Exception { } public void testNestedCopyTo() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("n") .field("type", "nested") @@ -605,14 +606,14 @@ public void testNestedCopyTo() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService 
mapperService = createIndex("test").mapperService(); mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, randomBoolean()); // no exception } public void testNestedCopyToMultiField() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("n") .field("type", "nested") @@ -629,7 +630,7 @@ public void testNestedCopyToMultiField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -638,7 +639,7 @@ public void testNestedCopyToMultiField() throws Exception { } public void testCopyFromMultiField() throws Exception { - String mapping = jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc") .startObject("properties") .startObject("my_field") .field("type", "keyword") @@ -650,7 +651,7 @@ public void testCopyFromMultiField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); MapperParsingException e = expectThrows(MapperParsingException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 5776e9d618e3b..d6b13750b3f3d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -21,6 +21,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import 
org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -57,19 +59,19 @@ protected Collection> getPlugins() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -86,19 +88,19 @@ public void testDefaults() throws Exception { } public void testNotIndexed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", 
"type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -108,19 +110,19 @@ public void testNotIndexed() throws Exception { } public void testNoDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -130,19 +132,19 @@ public void testNoDocValues() throws Exception { } public void testStore() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = 
parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-11") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -157,35 +159,35 @@ public void testStore() throws Exception { } public void testIgnoreMalformed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2016-03-99") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-99") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\"")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("field").field("type", "date") .field("ignore_malformed", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", ":1") - .endObject() - .bytes(), + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -193,20 +195,20 @@ public void testIgnoreMalformed() throws Exception { } public void testChangeFormat() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date") .field("format", "epoch_second").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 1457654400) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 1457654400) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -216,10 +218,10 @@ public void testChangeFormat() throws IOException { } public void testFloatEpochFormat() throws IOException { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date") .field("format", "epoch_millis").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -228,11 +230,11 @@ public void testFloatEpochFormat() throws IOException { double epochFloatMillisFromEpoch = (randomDouble() * 2 - 1) * 1000000; String epochFloatValue = String.format(Locale.US, "%f", epochFloatMillisFromEpoch); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", epochFloatValue) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", epochFloatValue) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -242,44 +244,44 @@ public void testFloatEpochFormat() throws IOException { } public void testChangeLocale() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 1457654400) - .endObject() - .bytes(), + mapper.parse(SourceToParse.source("test", "type", "1", 
BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 1457654400) + .endObject()), XContentType.JSON)); } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") .field("type", "date") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -287,16 +289,16 @@ public void testNullValue() throws IOException { .field("null_value", "2016-03-11") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); 
IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -312,7 +314,7 @@ public void testNullValue() throws IOException { } public void testNullConfigValuesFail() throws MapperParsingException, IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -320,17 +322,17 @@ public void testNullConfigValuesFail() throws MapperParsingException, IOExceptio .field("format", (String) null) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); assertEquals("[format] must not have a [null] value", e.getMessage()); } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "date") .field("format", "epoch_second").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) @@ -345,7 +347,7 @@ public void testEmptyName() throws IOException { public void testTimeZoneParsing() throws Exception { final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'"); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -353,7 +355,7 @@ public void testTimeZoneParsing() throws Exception { .field("format", timeZonePattern) .endObject() .endObject() - 
.endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); @@ -361,11 +363,11 @@ public void testTimeZoneParsing() throws Exception { final DateTimeZone randomTimeZone = randomBoolean() ? DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone(); final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate)) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate)) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -375,20 +377,20 @@ public void testTimeZoneParsing() throws Exception { } public void testMergeDate() throws IOException { - String initMapping = XContentFactory.jsonBuilder().startObject().startObject("movie") + String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("movie") .startObject("properties") .startObject("release_date").field("type", "date").field("format", "yyyy/MM/dd").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper initMapper = indexService.mapperService().merge("movie", new CompressedXContent(initMapping), MapperService.MergeReason.MAPPING_UPDATE, randomBoolean()); assertThat(initMapper.mappers().getMapper("release_date"), notNullValue()); assertFalse(initMapper.mappers().getMapper("release_date").fieldType().stored()); - String updateFormatMapping = XContentFactory.jsonBuilder().startObject().startObject("movie") + String 
updateFormatMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("movie") .startObject("properties") .startObject("release_date").field("type", "date").field("format", "epoch_millis").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); Exception e = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("movie", new CompressedXContent(updateFormatMapping), @@ -397,14 +399,14 @@ public void testMergeDate() throws IOException { } public void testMergeText() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc") .startObject("properties").startObject("date").field("type", "date").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = indexService.mapperService().parse("_doc", new CompressedXContent(mapping), false); - String mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_doc") + String mappingUpdate = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc") .startObject("properties").startObject("date").field("type", "text").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper update = indexService.mapperService().parse("_doc", new CompressedXContent(mappingUpdate), false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java index 68389112bfd51..ae2432301b27a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -41,16 +42,16 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase { public void test1Merge() throws Exception { - String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") + String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("name").field("type", "text").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping)); - String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") + String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("name").field("type", "text").endObject() .startObject("age").field("type", "integer").endObject() .startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping)); DocumentMapper merged = stage1.merge(stage2.mapping(), false); @@ -64,11 +65,11 @@ public void test1Merge() throws Exception { public void testMergeObjectDynamic() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - 
String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject().string(); + String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject()); DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping)); assertNull(mapper.root().dynamic()); - String withDynamicMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject().string(); + String withDynamicMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject()); DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping)); assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE)); @@ -78,13 +79,13 @@ public void testMergeObjectDynamic() throws Exception { public void testMergeObjectAndNested() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("obj").field("type", "object").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper objectMapper = parser.parse("type1", new CompressedXContent(objectMapping)); - String nestedMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + String nestedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("obj").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); 
DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping)); try { @@ -104,12 +105,12 @@ public void testMergeObjectAndNested() throws Exception { public void testMergeSearchAnalyzer() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1)); DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); @@ -122,12 +123,12 @@ public void testMergeSearchAnalyzer() throws Exception { public void testChangeSearchAnalyzerToDefault() throws Exception { MapperService mapperService = createIndex("test").mapperService(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + 
.endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false); DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE, false); @@ -206,23 +207,23 @@ public void run() { } public void testDoNotRepeatOriginalMapping() throws IOException { - CompressedXContent mapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("_source") .field("enabled", false) .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge("type", mapping, MapperService.MergeReason.MAPPING_UPDATE, false); - CompressedXContent update = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent update = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("foo") .field("type", "text") .endObject() .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); DocumentMapper mapper = mapperService.merge("type", update, MapperService.MergeReason.MAPPING_UPDATE, false); assertNotNull(mapper.mappers().getMapper("foo")); @@ -232,28 +233,28 @@ public void testDoNotRepeatOriginalMapping() throws IOException { public void testMergeChildType() throws IOException { DocumentMapperParser parser = 
createIndex("test").mapperService().documentMapperParser(); - String initMapping = XContentFactory.jsonBuilder().startObject().startObject("child") + String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("_parent").field("type", "parent").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper initMapper = parser.parse("child", new CompressedXContent(initMapping)); assertThat(initMapper.mappers().getMapper("_parent#parent"), notNullValue()); - String updatedMapping1 = XContentFactory.jsonBuilder().startObject().startObject("child") + String updatedMapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("properties") .startObject("name").field("type", "text").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper updatedMapper1 = parser.parse("child", new CompressedXContent(updatedMapping1)); DocumentMapper mergedMapper1 = initMapper.merge(updatedMapper1.mapping(), false); assertThat(mergedMapper1.mappers().getMapper("_parent#parent"), notNullValue()); assertThat(mergedMapper1.mappers().getMapper("name"), notNullValue()); - String updatedMapping2 = XContentFactory.jsonBuilder().startObject().startObject("child") + String updatedMapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("_parent").field("type", "parent").endObject() .startObject("properties") .startObject("age").field("type", "byte").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper updatedMapper2 = parser.parse("child", new CompressedXContent(updatedMapping2)); DocumentMapper mergedMapper2 = mergedMapper1.merge(updatedMapper2.mapping(), false); @@ -261,9 +262,9 @@ public void testMergeChildType() throws IOException { assertThat(mergedMapper2.mappers().getMapper("name"), 
notNullValue()); assertThat(mergedMapper2.mappers().getMapper("age"), notNullValue()); - String modParentMapping = XContentFactory.jsonBuilder().startObject().startObject("child") + String modParentMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child") .startObject("_parent").field("type", "new_parent").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper modParentMapper = parser.parse("child", new CompressedXContent(modParentMapping)); Exception e = expectThrows(IllegalArgumentException.class, () -> initMapper.merge(modParentMapper.mapping(), false)); assertThat(e.getMessage(), containsString("The _parent field's type option can't be changed: [parent]->[new_parent]")); @@ -272,19 +273,19 @@ public void testMergeChildType() throws IOException { public void testMergeAddingParent() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String initMapping = XContentFactory.jsonBuilder().startObject().startObject("cowboy") + String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("cowboy") .startObject("properties") .startObject("name").field("type", "text").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper initMapper = parser.parse("cowboy", new CompressedXContent(initMapping)); assertThat(initMapper.mappers().getMapper("name"), notNullValue()); - String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("cowboy") + String updatedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("cowboy") .startObject("_parent").field("type", "parent").endObject() .startObject("properties") .startObject("age").field("type", "byte").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper updatedMapper = parser.parse("cowboy", new 
CompressedXContent(updatedMapping)); Exception e = expectThrows(IllegalArgumentException.class, () -> initMapper.merge(updatedMapper.mapping(), false)); assertThat(e.getMessage(), containsString("The _parent field's type option can't be changed: [null]->[parent]")); @@ -293,41 +294,41 @@ public void testMergeAddingParent() throws IOException { public void testMergeMeta() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String initMapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("test") - .startObject("_meta") - .field("foo").value("bar") + String initMapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("test") + .startObject("_meta") + .field("foo").value("bar") + .endObject() .endObject() - .endObject() - .endObject() - .string(); + .endObject()); DocumentMapper initMapper = parser.parse("test", new CompressedXContent(initMapping)); assertThat(initMapper.meta().get("foo"), equalTo("bar")); - String updateMapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("test") - .startObject("properties") - .startObject("name").field("type", "text").endObject() + String updateMapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("test") + .startObject("properties") + .startObject("name").field("type", "text").endObject() + .endObject() .endObject() - .endObject() - .endObject() - .string(); + .endObject()); DocumentMapper updatedMapper = parser.parse("test", new CompressedXContent(updateMapping)); assertThat(initMapper.merge(updatedMapper.mapping(), true).meta().get("foo"), equalTo("bar")); - updateMapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("test") - .startObject("_meta") - .field("foo").value("new_bar") + updateMapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("test") + .startObject("_meta") + .field("foo").value("new_bar") + 
.endObject() .endObject() - .endObject() - .endObject() - .string(); + .endObject()); updatedMapper = parser.parse("test", new CompressedXContent(updateMapping)); assertThat(initMapper.merge(updatedMapper.mapping(), true).meta().get("foo"), equalTo("new_bar")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java index a29566bfa4ee5..268b03d046c1c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java @@ -19,11 +19,10 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; @@ -31,8 +30,8 @@ // TODO: move this test...it doesn't need to be by itself public class DocumentMapperParserTests extends ESSingleNodeTestCase { public void testTypeLevel() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -42,10 +41,10 @@ public void testTypeLevel() throws Exception { public void testFieldNameWithDots() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser mapperParser = 
indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo.bar").field("type", "text").endObject() .startObject("foo.baz").field("type", "keyword").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping)); assertNotNull(docMapper.mappers().getMapper("foo.bar")); assertNotNull(docMapper.mappers().getMapper("foo.baz")); @@ -55,11 +54,11 @@ public void testFieldNameWithDots() throws Exception { public void testFieldNameWithDeepDots() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo.bar").field("type", "text").endObject() .startObject("foo.baz").startObject("properties") .startObject("deep.field").field("type", "keyword").endObject().endObject() - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping)); assertNotNull(docMapper.mappers().getMapper("foo.bar")); assertNotNull(docMapper.mappers().getMapper("foo.baz.deep.field")); @@ -69,10 +68,10 @@ public void testFieldNameWithDeepDots() throws Exception { public void testFieldNameWithDotsConflict() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); - 
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo").field("type", "text").endObject() .startObject("foo.baz").field("type", "keyword").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapperParser.parse("type", new CompressedXContent(mapping))); assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] of different type")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index b3b33cf0dd6ad..dd4717a1a0f6a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -60,17 +61,17 @@ protected Collection> getPlugins() { public void testFieldDisabled() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo").field("enabled", false).endObject() .startObject("bar").field("type", "integer").endObject() - 
.endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("foo", "1234") .field("bar", 10) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertNull(doc.rootDoc().getField("foo")); assertNotNull(doc.rootDoc().getField("bar")); @@ -79,14 +80,14 @@ public void testFieldDisabled() throws Exception { public void testDotsWithExistingMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo").startObject("properties") .startObject("bar").startObject("properties") .startObject("baz").field("type", "integer") - .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("foo.bar.baz", 123) .startObject("foo") @@ -95,7 +96,7 @@ public void testDotsWithExistingMapper() throws Exception { .startObject("foo.bar") .field("baz", 789) .endObject() - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertNull(doc.dynamicMappingsUpdate()); 
// no update! String[] values = doc.rootDoc().getValues("foo.bar.baz"); @@ -107,16 +108,16 @@ public void testDotsWithExistingMapper() throws Exception { public void testDotsWithExistingNestedMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo").field("type", "nested").startObject("properties") .startObject("bar").field("type", "integer") - .endObject().endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("foo.bar", 123) - .endObject().bytes(); + .endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals( @@ -126,7 +127,7 @@ public void testDotsWithExistingNestedMapper() throws Exception { public void testDotsWithDynamicNestedMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates") .startObject() .startObject("objects_as_nested") @@ -136,13 +137,13 @@ public void testDotsWithDynamicNestedMapper() throws Exception { .endObject() .endObject() .endObject() - .endArray().endObject().endObject().string(); + 
.endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("foo.bar",42) - .endObject().bytes(); + .endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals( @@ -179,8 +180,8 @@ public void testNestedHaveIdAndTypeFields() throws Exception { mapping.endObject(); } mapping.endObject().endObject().endObject(); - DocumentMapper mapper1 = mapperParser1.parse("type", new CompressedXContent(mapping.string())); - DocumentMapper mapper2 = mapperParser2.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper1 = mapperParser1.parse("type", new CompressedXContent(Strings.toString(mapping))); + DocumentMapper mapper2 = mapperParser2.parse("type", new CompressedXContent(Strings.toString(mapping))); XContentBuilder doc = XContentFactory.jsonBuilder().startObject(); { @@ -196,7 +197,7 @@ public void testNestedHaveIdAndTypeFields() throws Exception { doc.endObject(); // Verify in the case where multiple types are allowed that the _uid field is added to nested documents: - ParsedDocument result = mapper1.parse(SourceToParse.source("index1", "type", "1", doc.bytes(), XContentType.JSON)); + ParsedDocument result = mapper1.parse(SourceToParse.source("index1", "type", "1", BytesReference.bytes(doc), XContentType.JSON)); assertEquals(2, result.docs().size()); // Nested document: assertNull(result.docs().get(0).getField(IdFieldMapper.NAME)); @@ -216,7 +217,7 @@ public void testNestedHaveIdAndTypeFields() throws Exception { assertEquals("value2", result.docs().get(1).getField("baz").binaryValue().utf8ToString()); // Verify in the case where only a single type is allowed that the _id field is added to nested documents: - 
result = mapper2.parse(SourceToParse.source("index2", "type", "1", doc.bytes(), XContentType.JSON)); + result = mapper2.parse(SourceToParse.source("index2", "type", "1", BytesReference.bytes(doc), XContentType.JSON)); assertEquals(2, result.docs().size()); // Nested document: assertNull(result.docs().get(0).getField(UidFieldMapper.NAME)); @@ -237,19 +238,19 @@ public void testNestedHaveIdAndTypeFields() throws Exception { public void testPropagateDynamicWithExistingMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .field("dynamic", false) .startObject("properties") .startObject("foo") .field("type", "object") .field("dynamic", true) .startObject("properties") - .endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "something") - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); assertNotNull(doc.rootDoc().getField("foo.bar")); @@ -257,19 +258,19 @@ public void testPropagateDynamicWithExistingMapper() throws Exception { public void testPropagateDynamicWithDynamicMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .field("dynamic", false) .startObject("properties") .startObject("foo") .field("type", "object") .field("dynamic", true) .startObject("properties") - .endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo").startObject("bar") .field("baz", "something") - .endObject().endObject().endObject().bytes(); + .endObject().endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); assertNotNull(doc.rootDoc().getField("foo.bar.baz")); @@ -277,30 +278,30 @@ public void testPropagateDynamicWithDynamicMapper() throws Exception { public void testDynamicRootFallback() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .field("dynamic", false) .startObject("properties") .startObject("foo") .field("type", "object") .startObject("properties") - .endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "something") - .endObject().endObject().bytes(); + .endObject().endObject()); 
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertNull(doc.dynamicMappingsUpdate()); assertNull(doc.rootDoc().getField("foo.bar")); } DocumentMapper createDummyMapping(MapperService mapperService) throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("y").field("type", "object").endObject() .startObject("x").startObject("properties") .startObject("subx").field("type", "object").startObject("properties") .startObject("subsubx").field("type", "object") - .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping)); return defaultMapper; @@ -405,81 +406,81 @@ public void testObjectMappingUpdate() throws Exception { public void testDynamicGeoPointArrayWithTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") .startObject("mapping").field("type", "geo_point").field("doc_values", false).endObject() - .endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = 
BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .startArray().value(0).value(0).endArray() .startArray().value(1).value(1).endArray() - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo").length); } public void testDynamicLongArrayWithTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") .startObject("mapping").field("type", "long").endObject() - .endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo").length); } public void testDynamicLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new 
CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo").length); } public void testDynamicFalseLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo").length); } public void testDynamicStrictLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + 
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); @@ -487,78 +488,78 @@ public void testDynamicStrictLongArray() throws Exception { public void testMappedGeoPointArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("foo").field("type", "geo_point").field("doc_values", false) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .startArray().value(0).value(0).endArray() .startArray().value(1).value(1).endArray() - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo").length); } public void testMappedLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("foo").field("type", "long") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo").length); } public void testDynamicObjectWithTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") .startObject("mapping").field("type", "object") .startObject("properties").startObject("bar").field("type", "keyword").endObject().endObject().endObject() - .endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "baz") - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar").length); } public void 
testDynamicFalseObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "baz") - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar").length); } public void testDynamicStrictObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo") .field("bar", "baz") - .endObject().endObject().bytes(); + .endObject().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] 
is not allowed", exception.getMessage()); @@ -566,28 +567,28 @@ public void testDynamicStrictObject() throws Exception { public void testDynamicFalseValue() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("bar", "baz") - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("bar").length); } public void testDynamicStrictValue() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("bar", "baz") - .endObject().bytes(); + .endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, 
dynamic introduction of [bar] within [type] is not allowed", exception.getMessage()); @@ -595,28 +596,28 @@ public void testDynamicStrictValue() throws Exception { public void testDynamicFalseNull() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("bar", (String) null) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("bar").length); } public void testDynamicStrictNull() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("bar", (String) null) - .endObject().bytes(); + .endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, 
XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [bar] within [type] is not allowed", exception.getMessage()); @@ -624,29 +625,29 @@ public void testDynamicStrictNull() throws Exception { public void testMappedNullValue() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("foo").field("type", "long") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo", (Long) null) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo").length); } public void testDynamicDottedFieldNameLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = 
mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -662,18 +663,18 @@ public void testDynamicDottedFieldNameLongArray() throws Exception { public void testDynamicDottedFieldNameLongArrayWithParentTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") .startObject("mapping").field("type", "object").endObject() - .endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -689,17 +690,17 @@ public void testDynamicDottedFieldNameLongArrayWithParentTemplate() throws Excep public void testDynamicDottedFieldNameLongArrayWithExistingParent() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", "object") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -715,17 +716,17 @@ public void testDynamicDottedFieldNameLongArrayWithExistingParent() throws Excep public void testDynamicDottedFieldNameLongArrayWithExistingParentWrongType() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, 
XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. " @@ -734,30 +735,30 @@ public void testDynamicDottedFieldNameLongArrayWithExistingParentWrongType() thr public void testDynamicFalseDottedFieldNameLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length); } public void testDynamicStrictDottedFieldNameLongArray() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("foo.bar.baz") .value(0) .value(1) - .endArray().endObject().bytes(); + .endArray().endObject()); 
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); @@ -765,13 +766,13 @@ public void testDynamicStrictDottedFieldNameLongArray() throws Exception { public void testDynamicDottedFieldNameLong() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -787,16 +788,16 @@ public void testDynamicDottedFieldNameLong() throws Exception { public void testDynamicDottedFieldNameLongWithParentTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") .startObject("mapping").field("type", "object").endObject() - 
.endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -812,15 +813,15 @@ public void testDynamicDottedFieldNameLongWithParentTemplate() throws Exception public void testDynamicDottedFieldNameLongWithExistingParent() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", "object") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -836,15 +837,15 @@ public void testDynamicDottedFieldNameLongWithExistingParent() throws Exception public void 
testDynamicDottedFieldNameLongWithExistingParentWrongType() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. 
" @@ -853,26 +854,26 @@ public void testDynamicDottedFieldNameLongWithExistingParentWrongType() throws E public void testDynamicFalseDottedFieldNameLong() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length); } public void testDynamicStrictDottedFieldNameLong() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) - .endObject().bytes(); + .endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); 
assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); @@ -880,14 +881,14 @@ public void testDynamicStrictDottedFieldNameLong() throws Exception { public void testDynamicDottedFieldNameObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo.bar.baz") .field("a", 0) - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -906,17 +907,17 @@ public void testDynamicDottedFieldNameObject() throws Exception { public void testDynamicDottedFieldNameObjectWithParentTemplate() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("georule") .field("match", "foo*") .startObject("mapping").field("type", "object").endObject() - .endObject().endObject().endArray().endObject().endObject().string(); + .endObject().endObject().endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", 
new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo.bar.baz") .field("a", 0) - .endObject().endObject().bytes(); + .endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -935,12 +936,12 @@ public void testDynamicDottedFieldNameObjectWithParentTemplate() throws Exceptio public void testDynamicDottedFieldNameObjectWithExistingParent() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("foo") - .field("type", "object").endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("foo") + .field("type", "object").endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject() - .bytes(); + BytesReference bytes = BytesReference + .bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); @@ -959,14 +960,14 @@ public void testDynamicDottedFieldNameObjectWithExistingParent() throws Exceptio public void 
testDynamicDottedFieldNameObjectWithExistingParentWrongType() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject() - .bytes(); + BytesReference bytes = BytesReference + .bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); @@ -976,28 +977,28 @@ public void testDynamicDottedFieldNameObjectWithExistingParentWrongType() throws public void testDynamicFalseDottedFieldNameObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "false") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo.bar.baz") .field("a", 0) - .endObject().endObject().bytes(); + 
.endObject().endObject()); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz.a").length); } public void testDynamicStrictDottedFieldNameObject() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").field("dynamic", "strict") + .endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startObject("foo.bar.baz") .field("a", 0) - .endObject().endObject().bytes(); + .endObject().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); @@ -1005,15 +1006,15 @@ public void testDynamicStrictDottedFieldNameObject() throws Exception { public void testDocumentContainsMetadataField() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder().startObject().field("_ttl", 
0).endObject().bytes(); + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_ttl", 0).endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertTrue(e.getMessage(), e.getMessage().contains("cannot be added inside a document")); - BytesReference bytes2 = XContentFactory.jsonBuilder().startObject().field("foo._ttl", 0).endObject().bytes(); + BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo._ttl", 0).endObject()); mapper.parse(SourceToParse.source("test", "type", "1", bytes2, XContentType.JSON)); // parses without error } @@ -1094,17 +1095,17 @@ public void testNoDocumentSent() throws Exception { } public void testNoLevel() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("test1", "value1") - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("test1", "value1") + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("test1"), equalTo("value1")); @@ -1113,17 +1114,17 @@ public void testNoLevel() throws 
Exception { } public void testTypeLevel() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startObject("type") - .field("test1", "value1") - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject().endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startObject("type") + .field("test1", "value1") + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.test1"), equalTo("value1")); @@ -1132,18 +1133,18 @@ public void testTypeLevel() throws Exception { } public void testNoLevelWithFieldTypeAsValue() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("type", "value_type") - .field("test1", "value1") - .field("test2", "value2") - 
.startObject("inner").field("inner_field", "inner_value").endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("type", "value_type") + .field("test1", "value1") + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type"), equalTo("value_type")); @@ -1153,18 +1154,18 @@ public void testNoLevelWithFieldTypeAsValue() throws Exception { } public void testTypeLevelWithFieldTypeAsValue() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startObject("type") - .field("type", "value_type") - .field("test1", "value1") - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject().endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startObject("type") + .field("type", "value_type") + .field("test1", "value1") + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type"), equalTo("value_type")); @@ -1174,18 +1175,18 @@ public void testTypeLevelWithFieldTypeAsValue() throws Exception { } public void 
testNoLevelWithFieldTypeAsObject() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("type").field("type_field", "type_value").endObject() - .field("test1", "value1") - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("type").field("type_field", "type_value").endObject() + .field("test1", "value1") + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject()), XContentType.JSON)); // in this case, we analyze the type object as the actual document, and ignore the other same level fields @@ -1195,18 +1196,18 @@ public void testNoLevelWithFieldTypeAsObject() throws Exception { } public void testTypeLevelWithFieldTypeAsObject() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - 
.startObject().startObject("type") - .startObject("type").field("type_field", "type_value").endObject() - .field("test1", "value1") - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject().endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startObject("type") + .startObject("type").field("type_field", "type_value").endObject() + .field("test1", "value1") + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value")); @@ -1216,18 +1217,18 @@ public void testTypeLevelWithFieldTypeAsObject() throws Exception { } public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startObject("type") - .field("test1", "value1") - .field("test2", "value2") - .field("type", "value_type") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject().endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startObject("type") + .field("test1", "value1") + .field("test2", "value2") + .field("type", "value_type") + .startObject("inner").field("inner_field", 
"inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type"), equalTo("value_type")); @@ -1237,18 +1238,18 @@ public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception { } public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startObject("type") - .field("test1", "value1") - .field("type", "value_type") - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject().endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startObject("type") + .field("test1", "value1") + .field("type", "value_type") + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type"), equalTo("value_type")); @@ -1258,18 +1259,18 @@ public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception { } public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("test1", "value1") - .startObject("type").field("type_field", "type_value").endObject() - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("test1", "value1") + .startObject("type").field("type_field", "type_value").endObject() + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject()), XContentType.JSON)); // when the type is not the first one, we don't confuse it... @@ -1280,18 +1281,18 @@ public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception { } public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startObject("type") - .field("test1", "value1") - .startObject("type").field("type_field", "type_value").endObject() - .field("test2", "value2") - .startObject("inner").field("inner_field", "inner_value").endObject() - .endObject().endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + 
.bytes(XContentFactory.jsonBuilder() + .startObject().startObject("type") + .field("test1", "value1") + .startObject("type").field("type_field", "type_value").endObject() + .field("test2", "value2") + .startObject("inner").field("inner_field", "inner_value").endObject() + .endObject().endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value")); @@ -1302,16 +1303,16 @@ public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception { public void testDynamicDateDetectionDisabledOnNumbers() throws IOException { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_date_formats") .value("yyyy") - .endArray().endObject().endObject().string(); + .endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("foo", "2016") - .endObject().bytes(); + .endObject()); // Even though we matched the dynamic format, we do not match on numbers, // which are too likely to be false positives @@ -1325,16 +1326,16 @@ public void testDynamicDateDetectionDisabledOnNumbers() throws IOException { public void testDynamicDateDetectionEnabledWithNoSpecialCharacters() throws IOException { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_date_formats") .value("yyyy MM") - .endArray().endObject().endObject().string(); + 
.endArray().endObject().endObject()); DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("foo", "2016 12") - .endObject().bytes(); + .endObject()); // We should have generated a date field ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); @@ -1346,17 +1347,17 @@ public void testDynamicDateDetectionEnabledWithNoSpecialCharacters() throws IOEx } public void testDynamicFieldsStartingAndEndingWithDot() throws Exception { - BytesReference bytes = XContentFactory.jsonBuilder().startObject().startArray("top.") + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startArray("top.") .startObject().startArray("foo.") .startObject() .field("thing", "bah") .endObject().endArray() .endObject().endArray() - .endObject().bytes(); + .endObject()); client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get(); - bytes = XContentFactory.jsonBuilder().startObject().startArray("top.") + bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startArray("top.") .startObject().startArray("foo.") .startObject() .startObject("bar.") @@ -1366,7 +1367,7 @@ public void testDynamicFieldsStartingAndEndingWithDot() throws Exception { .endObject() .endObject() .endArray().endObject().endArray() - .endObject().bytes(); + .endObject()); try { client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get(); @@ -1378,14 +1379,14 @@ public void testDynamicFieldsStartingAndEndingWithDot() throws Exception { } public void testDynamicFieldsEmptyName() throws Exception { - BytesReference bytes = XContentFactory.jsonBuilder() + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().startArray("top.") .startObject() .startObject("aoeu") .field("a", 
1).field(" ", 2) .endObject() .endObject().endArray() - .endObject().bytes(); + .endObject()); IllegalArgumentException emptyFieldNameException = expectThrows(IllegalArgumentException.class, () -> client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get()); @@ -1395,21 +1396,21 @@ public void testDynamicFieldsEmptyName() throws Exception { } public void testBlankFieldNames() throws Exception { - final BytesReference bytes = XContentFactory.jsonBuilder() + final BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("", "foo") - .endObject().bytes(); + .endObject()); MapperParsingException err = expectThrows(MapperParsingException.class, () -> client().prepareIndex("idx", "type").setSource(bytes, XContentType.JSON).get()); assertThat(ExceptionsHelper.detailedMessage(err), containsString("field name cannot be an empty string")); - final BytesReference bytes2 = XContentFactory.jsonBuilder() + final BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("foo") .field("", "bar") .endObject() - .endObject().bytes(); + .endObject()); err = expectThrows(MapperParsingException.class, () -> client().prepareIndex("idx", "type").setSource(bytes2, XContentType.JSON).get()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 804214ad96f51..6c83f31f93fe6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -24,12 +24,12 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -40,23 +40,23 @@ public void testDoubleIndexingSameDoc() throws Exception { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), Lucene.STANDARD_ANALYZER)); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper mapper = index.mapperService().documentMapper("type"); QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L, null); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", 1) - .field("field3", 1.1) - .field("field4", "2010-01-01") - .startArray("field5").value(1).value(2).value(3).endArray() - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", "value1") + .field("field2", 1) + .field("field3", 1.1) + .field("field4", "2010-01-01") + .startArray("field5").value(1).value(2).value(3).endArray() + .endObject()), XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type") diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index b227833f3444d..4a2876d1708bd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -34,7 +35,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.BooleanFieldMapper.BooleanFieldType; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -59,21 +59,21 @@ protected Collection> getPlugins() { } public void testDynamicTrue() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type") .field("dynamic", "true") .startObject("properties") .startObject("field1").field("type", "text").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .bytes(), + 
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .field("field1", "value1") + .field("field2", "value2") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("field1"), equalTo("value1")); @@ -81,21 +81,21 @@ public void testDynamicTrue() throws IOException { } public void testDynamicFalse() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type") .field("dynamic", "false") .startObject("properties") .startObject("field1").field("type", "text").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .field("field1", "value1") + .field("field2", "value2") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("field1"), equalTo("value1")); @@ -104,53 +104,53 @@ public void testDynamicFalse() throws IOException { public void testDynamicStrict() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type") .field("dynamic", "strict") .startObject("properties") .startObject("field1").field("type", "text").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new 
CompressedXContent(mapping)); - StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .bytes(), + StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .field("field1", "value1") + .field("field2", "value2") + .endObject()), XContentType.JSON))); assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed")); - e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", (String) null) - .endObject() - .bytes(), + e = expectThrows(StrictDynamicMappingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", "value1") + .field("field2", (String) null) + .endObject()), XContentType.JSON))); assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed")); } public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type") .field("dynamic", "false") .startObject("properties") .startObject("obj1").startObject("properties") .startObject("field1").field("type", "text").endObject() .endObject().endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder() - .startObject().startObject("obj1") - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(jsonBuilder() + .startObject().startObject("obj1") + .field("field1", "value1") + .field("field2", "value2") + .endObject() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("obj1.field1"), equalTo("value1")); @@ -158,25 +158,25 @@ public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOExcept } public void testDynamicStrictWithInnerObjectButDynamicSetOnRoot() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type") .field("dynamic", "strict") .startObject("properties") .startObject("obj1").startObject("properties") .startObject("field1").field("type", "text").endObject() .endObject().endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> - defaultMapper.parse(SourceToParse.source("test", "type", "1", jsonBuilder() - .startObject().startObject("obj1") - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .endObject() - .bytes(), + defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(jsonBuilder() + .startObject().startObject("obj1") + .field("field1", "value1") + .field("field2", "value2") + .endObject() + .endObject()), XContentType.JSON))); 
assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [obj1] is not allowed")); } @@ -207,12 +207,12 @@ public void testTypeNotCreatedOnIndexFailure() throws IOException, InterruptedEx private String serialize(ToXContent mapper) throws Exception { XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, new ToXContent.MapParams(emptyMap())); - return builder.endObject().string(); + return Strings.toString(builder.endObject()); } private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder) throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id", builder.bytes(), builder.contentType()); + SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id", BytesReference.bytes(builder), builder.contentType()); try (XContentParser xContentParser = createParser(JsonXContent.jsonXContent, source.source())) { ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, source, xContentParser); assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken()); @@ -226,9 +226,9 @@ private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XConten public void testDynamicMappingsNotNeeded() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("foo").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new 
CompressedXContent(mapping)); Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()); @@ -239,9 +239,9 @@ public void testDynamicMappingsNotNeeded() throws Exception { public void testField() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type").endObject() - .endObject().string(); + .endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); @@ -251,7 +251,7 @@ public void testField() throws Exception { // original mapping not modified assertEquals(mapping, serialize(mapper)); // but we have an update - assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo") .field("type", "text") .startObject("fields") @@ -261,7 +261,7 @@ public void testField() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject().string(), serialize(update)); + .endObject().endObject().endObject()), serialize(update)); } public void testIncremental() throws Exception { @@ -269,9 +269,9 @@ public void testIncremental() throws Exception { DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); // Make sure that mapping updates are incremental, this is important for performance otherwise // every new field introduction runs in linear time with the total number of fields - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("foo").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); @@ -281,7 +281,7 @@ public void testIncremental() throws Exception { // original mapping not modified assertEquals(mapping, serialize(mapper)); // but we have an update - assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") // foo is NOT in the update .startObject("bar").field("type", "text") .startObject("fields") @@ -291,15 +291,15 @@ public void testIncremental() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject().string(), serialize(update)); + .endObject().endObject().endObject()), serialize(update)); } public void testIntroduceTwoFields() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type").endObject() - .endObject().string(); + .endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); @@ -309,7 +309,7 @@ public void testIntroduceTwoFields() throws Exception { // original mapping not modified assertEquals(mapping, serialize(mapper)); // but we have an update - assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("bar").field("type", "text") 
.startObject("fields") .startObject("keyword") @@ -326,15 +326,15 @@ public void testIntroduceTwoFields() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject().string(), serialize(update)); + .endObject().endObject().endObject()), serialize(update)); } public void testObject() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type").endObject() - .endObject().string(); + .endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); @@ -344,19 +344,19 @@ public void testObject() throws Exception { // original mapping not modified assertEquals(mapping, serialize(mapper)); // but we have an update - assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text") .startObject("fields").startObject("keyword").field("type", "keyword").field("ignore_above", 256).endObject() .endObject().endObject().endObject().endObject().endObject().endObject() - .endObject().endObject().endObject().string(), serialize(update)); + .endObject().endObject().endObject()), serialize(update)); } public void testArray() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() 
.startObject("type").endObject() - .endObject().string(); + .endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); @@ -366,7 +366,7 @@ public void testArray() throws Exception { // original mapping not modified assertEquals(mapping, serialize(mapper)); // but we have an update - assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo") .field("type", "text") .startObject("fields") @@ -376,15 +376,15 @@ public void testArray() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject().string(), serialize(update)); + .endObject().endObject().endObject()), serialize(update)); } public void testInnerDynamicMapping() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo").field("type", "object").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); @@ -394,19 +394,19 @@ public void testInnerDynamicMapping() throws Exception { // original mapping not modified assertEquals(mapping, serialize(mapper)); // but we have an update - assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") 
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "text").startObject("fields") .startObject("keyword").field("type", "keyword").field("ignore_above", 256).endObject() .endObject().endObject().endObject().endObject().endObject().endObject() - .endObject().endObject().endObject().string(), serialize(update)); + .endObject().endObject().endObject()), serialize(update)); } public void testComplexArray() throws Exception { IndexService indexService = createIndex("test"); DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type").endObject() - .endObject().string(); + .endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, serialize(mapper)); @@ -416,7 +416,7 @@ public void testComplexArray() throws Exception { .startObject().field("baz", 3).endObject() .endArray().endObject()); assertEquals(mapping, serialize(mapper)); - assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + assertEquals(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo").startObject("properties") .startObject("bar").field("type", "text") .startObject("fields") @@ -428,7 +428,7 @@ public void testComplexArray() throws Exception { .endObject() .startObject("baz").field("type", "long").endObject() .endObject().endObject() - .endObject().endObject().endObject().string(), serialize(update)); + .endObject().endObject().endObject()), serialize(update)); } public void testReuseExistingMappings() throws IOException, Exception { @@ -551,13 +551,13 @@ public void testMixTemplateMultiFieldAndMappingReuse() throws Exception { .endObject() .endArray() .endObject().endObject(); 
- indexService.mapperService().merge("_doc", new CompressedXContent(mappings1.bytes()), + indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(mappings1)), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder json = XContentFactory.jsonBuilder().startObject() .field("field", "foo") .endObject(); - SourceToParse source = SourceToParse.source("test", "_doc", "1", json.bytes(), json.contentType()); + SourceToParse source = SourceToParse.source("test", "_doc", "1", BytesReference.bytes(json), json.contentType()); DocumentMapper mapper = indexService.mapperService().documentMapper("_doc"); assertNull(mapper.mappers().getMapper("field.raw")); ParsedDocument parsed = mapper.parse(source); @@ -591,7 +591,8 @@ public void testMixTemplateMultiFieldMultiTypeAndMappingReuse() throws Exception .endObject() .endArray() .endObject().endObject(); - indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); + indexService.mapperService().merge("type1", new CompressedXContent(BytesReference.bytes(mappings1)), + MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder mappings2 = jsonBuilder().startObject() .startObject("type2") .startObject("properties") @@ -600,12 +601,12 @@ public void testMixTemplateMultiFieldMultiTypeAndMappingReuse() throws Exception .endObject() .endObject() .endObject().endObject(); - indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); + indexService.mapperService().merge("type2", new CompressedXContent(BytesReference.bytes(mappings2)), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder json = XContentFactory.jsonBuilder().startObject() .field("field", "foo") .endObject(); - SourceToParse source = SourceToParse.source("test", "type1", "1", json.bytes(), json.contentType()); + SourceToParse source = SourceToParse.source("test", 
"type1", "1", BytesReference.bytes(json), json.contentType()); DocumentMapper mapper = indexService.mapperService().documentMapper("type1"); assertNull(mapper.mappers().getMapper("field.raw")); ParsedDocument parsed = mapper.parse(source); @@ -620,10 +621,10 @@ public void testMixTemplateMultiFieldMultiTypeAndMappingReuse() throws Exception public void testDefaultFloatingPointMappings() throws IOException { MapperService mapperService = createIndex("test").mapperService(); - String mapping = jsonBuilder().startObject() + String mapping = Strings.toString(jsonBuilder().startObject() .startObject("type") .field("numeric_detection", true) - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); DocumentMapper mapper = mapperService.documentMapper("type"); doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder()); @@ -633,12 +634,12 @@ public void testDefaultFloatingPointMappings() throws IOException { } private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentBuilder builder) throws IOException { - BytesReference source = builder.startObject() + BytesReference source = BytesReference.bytes(builder.startObject() .field("foo", 3.2f) // float .field("bar", 3.2d) // double .field("baz", (double) 3.2f) // double that can be accurately represented as a float .field("quux", "3.2") // float detected through numeric detection - .endObject().bytes(); + .endObject()); ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", source, builder.contentType())); Mapping update = parsedDocument.dynamicMappingsUpdate(); assertNotNull(update); @@ -649,20 +650,20 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB } public void testNumericDetectionEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String 
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .field("numeric_detection", true) - .endObject().endObject().string(); + .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("s_long", "100") - .field("s_double", "100.0") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("s_long", "100") + .field("s_double", "100.0") + .endObject()), XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type") @@ -677,19 +678,19 @@ public void testNumericDetectionEnabled() throws Exception { } public void testNumericDetectionDefault() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("s_long", "100") - .field("s_double", "100.0") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + 
.startObject() + .field("s_long", "100") + .field("s_double", "100.0") + .endObject()), XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); assertAcked(client().admin().indices().preparePutMapping("test").setType("type") @@ -704,7 +705,7 @@ public void testNumericDetectionDefault() throws Exception { } public void testDateDetectionInheritsFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_date_formats") .value("yyyy-MM-dd") .endArray() @@ -727,19 +728,19 @@ public void testDateDetectionInheritsFormat() throws Exception { .endObject() .endObject() .endArray() - .endObject().endObject().string(); + .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date1", "2016-11-20") - .field("date2", "2016-11-20") - .field("date3", "2016-11-20") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("date1", "2016-11-20") + .field("date2", "2016-11-20") + .field("date3", "2016-11-20") + .endObject()), XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); assertAcked(client().admin().indices().preparePutMapping("test").setType("type") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java index 5fadf81ed927f..59315445c32cd 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -45,17 +47,17 @@ public void testDynamicMappingDefault() throws IOException { .documentMapperWithAutoCreate("my-type").getDocumentMapper(); ParsedDocument parsedDoc = documentMapper.parse( - SourceToParse.source("my-index", "my-type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("foo", 3) - .endObject() - .bytes(), XContentType.JSON)); + SourceToParse.source("my-index", "my-type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("foo", 3) + .endObject()), XContentType.JSON)); - String expectedMapping = XContentFactory.jsonBuilder().startObject() + String expectedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("my-type") .startObject("properties") .startObject("foo").field("type", "long") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); assertEquals(expectedMapping, parsedDoc.dynamicMappingsUpdate().toString()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index 7ed6efe516ab0..562d54a92babd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; 
+import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -86,7 +87,7 @@ public void testSerialization() throws Exception { DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1); XContentBuilder builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string()); + assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); // name-based template templateDef = new HashMap<>(); @@ -96,7 +97,7 @@ public void testSerialization() throws Exception { template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1); builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", builder.string()); + assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); // path-based template templateDef = new HashMap<>(); @@ -107,7 +108,7 @@ public void testSerialization() throws Exception { builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", - builder.string()); + Strings.toString(builder)); // regex matching templateDef = new HashMap<>(); @@ -117,6 +118,6 @@ public void testSerialization() throws Exception { template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1); builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); - 
assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", builder.string()); + assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 70cc2c08441eb..64927103e6d1d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -23,15 +23,12 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentFieldMappers; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import org.hamcrest.Matchers; @@ -51,7 +48,7 @@ public void testMatchTypeOnly() throws Exception { DocumentMapper docMapper = index.mapperService().documentMapper("person"); builder = JsonXContent.contentBuilder(); builder.startObject().field("s", "hello").field("l", 1).endObject(); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", builder.bytes(), + ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", BytesReference.bytes(builder), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") 
.setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); @@ -74,7 +71,7 @@ public void testSimple() throws Exception { client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("person"); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), + ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); @@ -133,7 +130,7 @@ public void testSimpleWithXContentTraverse() throws Exception { client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("person"); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), + ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index 72d6e8c4c2cc7..8f2a51bbfc2bd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -23,6 +23,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; @@ -71,20 +73,20 @@ public void testExternalValues() throws Exception { indexService.getIndexAnalyzers(), indexService.xContentRegistry(), indexService.similarityService(), mapperRegistry, queryShardContext); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( - XContentFactory.jsonBuilder().startObject().startObject("type") + Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject(ExternalMetadataMapper.CONTENT_TYPE) .endObject() .startObject("properties") .startObject("field").field("type", "external").endObject() .endObject() - .endObject().endObject().string() + .endObject().endObject()) )); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("field.bool"), notNullValue()); @@ -123,7 +125,8 @@ public void testExternalValuesWithMultifield() throws Exception { queryShardContext); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( - XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + Strings + 
.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("field") .field("type", ExternalMapperPlugin.EXTERNAL) .startObject("fields") @@ -139,14 +142,13 @@ public void testExternalValuesWithMultifield() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject() - .string())); + .endObject().endObject().endObject()))); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("field.bool"), notNullValue()); @@ -189,7 +191,8 @@ public void testExternalValuesWithMultifieldTwoLevels() throws Exception { queryShardContext); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent( - XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + Strings + .toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("field") .field("type", ExternalMapperPlugin.EXTERNAL) .startObject("fields") @@ -209,14 +212,13 @@ public void testExternalValuesWithMultifieldTwoLevels() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject() - .string())); + .endObject().endObject().endObject()))); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), 
XContentType.JSON)); assertThat(doc.rootDoc().getField("field.bool"), notNullValue()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 3655f04fcbba1..32a2f73536a00 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -61,9 +63,9 @@ public void testExtractFieldNames() { } public void testFieldType() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); @@ -75,66 +77,66 @@ public void testFieldType() throws Exception { } public void testInjectIntoDocDuringParsing() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new 
CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("a", "100") - .startObject("b") - .field("c", 42) - .endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("a", "100") + .startObject("b") + .field("c", 42) + .endObject() + .endObject()), XContentType.JSON)); assertFieldNames(Collections.emptySet(), doc); } public void testExplicitEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", true).endObject() .startObject("properties").startObject("field").field("type", "keyword").field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertTrue(fieldNamesMapper.fieldType().isEnabled()); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()), XContentType.JSON)); assertFieldNames(set("field"), doc); } public void testDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); assertFalse(fieldNamesMapper.fieldType().isEnabled()); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()), XContentType.JSON)); assertNull(doc.rootDoc().get("_field_names")); } public void testMergingMappings() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String enabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", true).endObject() - .endObject().endObject().string(); - String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); + String disabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_field_names").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index f8775073e2169..40fc0e81a920c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,14 +54,14 @@ protected Collection> getPlugins() { public void testGeoHashValue() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", stringEncode(1.3, 1.2)) - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("point", stringEncode(1.3, 1.2)) + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -68,14 +70,14 @@ public void testGeoHashValue() throws Exception { public void testLatLonValuesStored() throws Exception { 
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -84,17 +86,17 @@ public void testLatLonValuesStored() throws Exception { public void testArrayLatLonValues() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point") - .startObject().field("lat", 
1.2).field("lon", 1.3).endObject() - .startObject().field("lat", 1.4).field("lon", 1.5).endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point") + .startObject().field("lat", 1.2).field("lon", 1.3).endObject() + .startObject().field("lat", 1.4).field("lon", 1.5).endObject() + .endArray() + .endObject()), XContentType.JSON)); // doc values are enabled by default, but in this test we disable them; we should only have 2 points @@ -105,15 +107,15 @@ public void testArrayLatLonValues() throws Exception { public void testLatLonInOneValue() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", "1.2,1.3") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("point", "1.2,1.3") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -122,15 +124,15 @@ public void testLatLonInOneValue() throws Exception { public void testLatLonInOneValueStored() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", "1.2,1.3") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("point", "1.2,1.3") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } @@ -138,18 +140,18 @@ public void testLatLonInOneValueStored() throws Exception { public void testLatLonInOneValueArray() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point") - .value("1.2,1.3") - .value("1.4,1.5") - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + 
.bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point") + .value("1.2,1.3") + .value("1.4,1.5") + .endArray() + .endObject()), XContentType.JSON)); // doc values are enabled by default, but in this test we disable them; we should only have 2 points @@ -160,14 +162,14 @@ public void testLatLonInOneValueArray() throws Exception { public void testLonLatArray() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point").value(1.3).value(1.2).endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point").value(1.3).value(1.2).endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -177,14 +179,14 @@ public void testLonLatArrayDynamic() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("point").field("match", "point*") .startObject("mapping").field("type", "geo_point"); - String mapping = xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject()); 
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point").value(1.3).value(1.2).endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point").value(1.3).value(1.2).endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -193,14 +195,14 @@ public void testLonLatArrayDynamic() throws Exception { public void testLonLatArrayStored() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point").value(1.3).value(1.2).endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point").value(1.3).value(1.2).endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); @@ -210,18 +212,18 @@ public void testLonLatArrayStored() throws Exception { public void testLonLatArrayArrayStored() throws 
Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - String mapping = xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + String mapping = Strings.toString(xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject() + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("point") - .startArray().value(1.3).value(1.2).endArray() - .startArray().value(1.5).value(1.4).endArray() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startArray("point") + .startArray().value(1.3).value(1.2).endArray() + .startArray().value(1.5).value(1.4).endArray() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getFields("point"), notNullValue()); @@ -230,13 +232,13 @@ public void testLonLatArrayArrayStored() throws Exception { public void testMultiField() throws Exception { int numDocs = randomIntBetween(10, 100); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") .field("type", "geo_point") .startObject("fields") .startObject("geohash").field("type", "keyword").endObject() // test geohash as keyword .startObject("latlon").field("type", "keyword").endObject() // test geohash as string .endObject() - 
.endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test") .addMapping("pin", mapping, XContentType.JSON); mappingRequest.execute().actionGet(); @@ -262,9 +264,9 @@ public void testMultiField() throws Exception { public void testEmptyName() throws Exception { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "geo_point").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index e43cfbe1fd1c1..352ef56b2315f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -46,11 +47,11 @@ protected Collection> getPlugins() { } public void testDefaultConfiguration() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -69,12 +70,12 @@ public void testDefaultConfiguration() throws IOException { * Test that orientation parameter correctly parses */ public void testOrientationParsing() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("orientation", "left") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -86,12 +87,12 @@ public void testOrientationParsing() throws IOException { assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); // explicit right orientation test - mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("orientation", "right") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -107,12 
+108,12 @@ public void testOrientationParsing() throws IOException { * Test that coerce parameter correctly parses */ public void testCoerceParsing() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("coerce", "true") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -122,12 +123,12 @@ public void testCoerceParsing() throws IOException { assertThat(coerce, equalTo(true)); // explicit false coerce test - mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("coerce", "false") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -141,12 +142,12 @@ public void testCoerceParsing() throws IOException { * Test that ignore_malformed parameter correctly parses */ public void testIgnoreMalformedParsing() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("ignore_malformed", "true") .endObject().endObject() - 
.endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -156,12 +157,12 @@ public void testIgnoreMalformedParsing() throws IOException { assertThat(ignoreMalformed.value(), equalTo(true)); // explicit false ignore_malformed test - mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("ignore_malformed", "false") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -173,14 +174,14 @@ public void testIgnoreMalformedParsing() throws IOException { } public void testGeohashConfiguration() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") .field("tree_levels", "4") .field("distance_error_pct", "0.1") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -195,7 +196,7 @@ public void testGeohashConfiguration() throws IOException { } public void testQuadtreeConfiguration() throws IOException { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") @@ -203,7 +204,7 @@ public void testQuadtreeConfiguration() throws IOException { .field("distance_error_pct", "0.5") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -222,7 +223,7 @@ public void testLevelPrecisionConfiguration() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") @@ -230,7 +231,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); @@ -247,14 +248,14 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .field("tree_levels", "26") .field("precision", "70m") .endObject().endObject() - 
.endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); @@ -273,7 +274,7 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") @@ -281,7 +282,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -297,7 +298,7 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") @@ -305,7 +306,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -320,7 +321,7 @@ public void testLevelPrecisionConfiguration() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") 
.startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") @@ -328,7 +329,7 @@ public void testLevelPrecisionConfiguration() throws IOException { .field("precision", "70m") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -344,13 +345,13 @@ public void testLevelPrecisionConfiguration() throws IOException { } public void testPointsOnlyOption() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -366,13 +367,13 @@ public void testPointsOnlyOption() throws IOException { public void testLevelDefaults() throws IOException { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new 
CompressedXContent(mapping)); @@ -389,13 +390,13 @@ public void testLevelDefaults() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "geohash") .field("distance_error_pct", "0.5") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); @@ -412,16 +413,16 @@ public void testLevelDefaults() throws IOException { } public void testGeoShapeMapperMerge() throws Exception { - String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("shape").field("type", "geo_shape").field("tree", "geohash").field("strategy", "recursive") .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree") .field("strategy", "term").field("precision", "1km").field("tree_levels", 
26).field("distance_error_pct", 26) - .field("orientation", "cw").endObject().endObject().endObject().endObject().string(); + .field("orientation", "cw").endObject().endObject().endObject().endObject()); try { mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); @@ -446,9 +447,9 @@ public void testGeoShapeMapperMerge() throws Exception { assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW)); // correct mapping - stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m") - .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string(); + .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject()); docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fieldMapper = docMapper.mappers().getMapper("shape"); @@ -466,11 +467,11 @@ public void testGeoShapeMapperMerge() throws Exception { public void testEmptyName() throws Exception { // after 5.x - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("") .field("type", "geo_shape") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java index ec07c4d92be3e..c7f44909cbd22 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java @@ -22,7 +22,9 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -45,12 +47,12 @@ protected Collection> getPlugins() { } public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_id", "1").endObject().bytes(), XContentType.JSON)); + docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().field("_id", "1").endObject()), XContentType.JSON)); fail("Expected failure to parse metadata field"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("Field [_id] is a metadata field and cannot be added inside a document")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java index 910fa0f74faba..5e60e248927d7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java @@ -19,16 +19,11 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -47,15 +42,15 @@ protected Collection> getPlugins() { } public void testDefaultDisabledIndexMapper() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + 
.field("field", "value") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("_index"), nullValue()); @@ -63,9 +58,9 @@ public void testDefaultDisabledIndexMapper() throws Exception { } public void testIndexNotConfigurable() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_index").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index 88db0b1b274fd..bb80fb0b796eb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -23,6 +23,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.ToXContent; @@ -58,19 +60,19 @@ protected Collection> getPlugins() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + 
.endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -87,19 +89,19 @@ public void testDefaults() throws Exception { } public void testNotIndexed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -109,19 +111,19 @@ public void testNotIndexed() throws Exception { } public void testNoDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("field").field("type", "ip").field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -132,19 +134,19 @@ public void testNoDocValues() throws Exception { } public void testStore() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "::1") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -160,34 +162,34 @@ public void testStore() throws Exception { } public void testIgnoreMalformed() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", ":1") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("':1' is not an IP string literal")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").field("ignore_malformed", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", ":1") - .endObject() - .bytes(), + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -195,27 +197,27 @@ public void testIgnoreMalformed() throws 
Exception { } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") .field("type", "ip") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -223,16 +225,16 @@ public void testNullValue() throws IOException { .field("null_value", "::1") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -248,15 +250,15 @@ public void testNullValue() throws 
IOException { } public void testSerializeDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); IpFieldMapper mapper = (IpFieldMapper)docMapper.root().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.doXContentBody(builder, true, ToXContent.EMPTY_PARAMS); - String got = builder.endObject().string(); + String got = Strings.toString(builder.endObject()); // it would be nice to check the entire serialized default mapper, but there are // a whole lot of bogus settings right now it picks up from calling super.doXContentBody... @@ -266,9 +268,9 @@ public void testSerializeDefaults() throws Exception { } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java index 829c05701fffe..e16b04748a18b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java @@ -20,6 +20,8 @@ import 
org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -50,18 +52,18 @@ public void testStoreCidr() throws Exception { .startObject("properties").startObject("field").field("type", "ip_range") .field("store", true); mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); final Map cases = new HashMap<>(); cases.put("192.168.0.0/15", "192.169.255.255"); cases.put("192.168.0.0/16", "192.168.255.255"); cases.put("192.168.0.0/17", "192.168.127.255"); for (final Map.Entry entry : cases.entrySet()) { ParsedDocument doc = - mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field", entry.getKey()) - .endObject().bytes(), + .endObject()), XContentType.JSON )); IndexableField[] fields = doc.rootDoc().getFields("field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java index c17df90b5a21d..093dd062ce05d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java @@ -25,8 +25,6 @@ import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -45,7 +43,7 @@ public void testMergeMultiField() throws Exception { assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); - BytesReference json = XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject().bytes(); + BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); @@ -101,7 +99,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); - BytesReference json = XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject().bytes(); + BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index e67b25b051b4e..c00381134f1b4 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -25,6 +25,8 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -77,19 +79,19 @@ public void setup() { } public void testDefaults() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -114,29 +116,29 @@ public void testDefaults() throws Exception { } public void testIgnoreAbove() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", 
"keyword").field("ignore_above", 5).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "elk") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "elk") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "elasticsearch") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "elasticsearch") + .endObject()), XContentType.JSON)); fields = doc.rootDoc().getFields("field"); @@ -144,43 +146,43 @@ public void testIgnoreAbove() throws IOException { } public void testNullValue() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = 
mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("null_value", "uri").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); fields = doc.rootDoc().getFields("field"); @@ -189,19 +191,19 @@ public void testNullValue() throws IOException { } public void testEnableStore() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("store", true).endObject().endObject() - 
.endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -210,19 +212,19 @@ public void testEnableStore() throws IOException { } public void testDisableIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -232,19 +234,19 @@ public void testDisableIndex() throws IOException { } public void testDisableDocValues() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("doc_values", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -253,20 +255,20 @@ public void testDisableDocValues() throws IOException { } public void testIndexOptions() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword") .field("index_options", "freqs").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -274,10 +276,10 @@ public void 
testIndexOptions() throws IOException { assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions()); for (String indexOptions : Arrays.asList("positions", "offsets")) { - final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword") .field("index_options", indexOptions).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping2))); assertEquals("The [keyword] field does not support positions, got [index_options]=" + indexOptions, e.getMessage()); @@ -285,9 +287,9 @@ public void testIndexOptions() throws IOException { } public void testBoost() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("boost", 2f).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -295,19 +297,19 @@ public void testBoost() throws IOException { } public void testEnableNorms() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("norms", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, 
mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -316,20 +318,20 @@ public void testEnableNorms() throws IOException { } public void testNormalizer() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "keyword").field("normalizer", "my_lowercase").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "AbC") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "AbC") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -354,16 +356,16 @@ public void testNormalizer() throws IOException { } public void testUpdateNormalizer() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "keyword").field("normalizer", 
"my_lowercase").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, randomBoolean()); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "keyword").field("normalizer", "my_other_lowercase").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, randomBoolean())); @@ -373,14 +375,14 @@ public void testUpdateNormalizer() throws IOException { } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("") .field("type", "keyword") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); // Empty name not allowed in index created after 5.0 IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index cdff4f5ff532b..2c515c02e2a79 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -21,6 +21,8 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -141,9 +143,9 @@ public void testIndexIntoDefaultMapping() throws Throwable { public void testTotalFieldsExceedsLimit() throws Throwable { Function mapping = type -> { try { - return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + return Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") .startObject("field1").field("type", "keyword") - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -158,22 +160,22 @@ public void testTotalFieldsExceedsLimit() throws Throwable { } public void testMappingDepthExceedsLimit() throws Throwable { - CompressedXContent simpleMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent simpleMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("field") .field("type", "text") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); IndexService indexService1 = createIndex("test1", Settings.builder().put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), 1).build()); // no exception indexService1.mapperService().merge("type", simpleMapping, MergeReason.MAPPING_UPDATE, false); - CompressedXContent objectMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent objectMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("object1") .field("type", "object") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); IndexService indexService2 = createIndex("test2"); // 
no exception @@ -231,12 +233,12 @@ public void testOtherDocumentMappersOnlyUpdatedWhenChangingFieldType() throws IO IndexService indexService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()); // multiple types - CompressedXContent simpleMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent simpleMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("field") .field("type", "text") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); indexService.mapperService().merge("type1", simpleMapping, MergeReason.MAPPING_UPDATE, true); DocumentMapper documentMapper = indexService.mapperService().documentMapper("type1"); @@ -244,13 +246,13 @@ public void testOtherDocumentMappersOnlyUpdatedWhenChangingFieldType() throws IO indexService.mapperService().merge("type2", simpleMapping, MergeReason.MAPPING_UPDATE, true); assertSame(indexService.mapperService().documentMapper("type1"), documentMapper); - CompressedXContent normsDisabledMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent normsDisabledMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("field") .field("type", "text") .field("norms", false) .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); indexService.mapperService().merge("type3", normsDisabledMapping, MergeReason.MAPPING_UPDATE, true); assertNotSame(indexService.mapperService().documentMapper("type1"), documentMapper); @@ -260,10 +262,10 @@ public void testAllEnabled() throws Exception { IndexService indexService = createIndex("test"); assertFalse(indexService.mapperService().allEnabled()); - CompressedXContent enabledAll = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent 
enabledAll = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("_all") .field("enabled", true) - .endObject().endObject().bytes()); + .endObject().endObject())); Exception e = expectThrows(MapperParsingException.class, () -> indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, enabledAll, @@ -316,12 +318,12 @@ public void testIndexSortWithNestedFields() throws IOException { assertThat(invalidNestedException.getMessage(), containsString("cannot have nested fields when index sort is activated")); IndexService indexService = createIndex("test", settings, "t", "foo", "type=keyword"); - CompressedXContent nestedFieldMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + CompressedXContent nestedFieldMapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("nested_field") .field("type", "nested") .endObject() - .endObject().endObject().bytes()); + .endObject().endObject())); invalidNestedException = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("t", nestedFieldMapping, MergeReason.MAPPING_UPDATE, true)); @@ -330,18 +332,18 @@ public void testIndexSortWithNestedFields() throws IOException { } public void testForbidMultipleTypes() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, randomBoolean()); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject().string(); + String mapping2 = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, randomBoolean())); assertThat(e.getMessage(), Matchers.startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")); } public void testDefaultMappingIsDeprecated() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("_default_").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_default_").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge("_default_", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, randomBoolean()); assertWarnings("[_default_] mapping is deprecated since it is not useful anymore now that indexes " + diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java index 72b1c95d8bd02..5c8aec74542f3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -51,7 +52,7 @@ public void testExceptionForIncludeInAll() throws IOException { final MapperService currentMapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings, "test"); Exception e = expectThrows(MapperParsingException.class, () -> - 
currentMapperService.parse("type", new CompressedXContent(mapping.string()), true)); + currentMapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true)); assertEquals("[include_in_all] is not allowed for indices created on or after version 6.0.0 as [_all] is deprecated. " + "As a replacement, you can use an [copy_to] on mapping fields to create your own catch all field.", e.getMessage()); @@ -61,7 +62,7 @@ public void testExceptionForIncludeInAll() throws IOException { // Create the mapping service with an older index creation version final MapperService oldMapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings, "test"); // Should not throw an exception now - oldMapperService.parse("type", new CompressedXContent(mapping.string()), true); + oldMapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true); } private static XContentBuilder createMappingWithIncludeInAll() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java index 4d9323bddb1ad..9e31bd76c3016 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,7 +40,7 @@ public void testExceptionForCopyToInMultiFields() throws IOException { // first check that for newer versions we throw exception if copy_to is found withing multi field MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "test"); try { - 
mapperService.parse("type", new CompressedXContent(mapping.string()), true); + mapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true); fail("Parsing should throw an exception because the mapping contains a copy_to in a multi field"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. Found the copy_to in field [c] which is within a multi field.")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldIncludeInAllMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldIncludeInAllMapperTests.java index c4195b776a6d9..70e0eeb7c6ecf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldIncludeInAllMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldIncludeInAllMapperTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,7 +37,7 @@ public void testExceptionForIncludeInAllInMultiFields() throws IOException { // first check that for newer versions we throw exception if include_in_all is found withing multi field MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "test"); Exception e = expectThrows(MapperParsingException.class, () -> - mapperService.parse("type", new CompressedXContent(mapping.string()), true)); + mapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true)); assertEquals("include_in_all in multi fields is not allowed. 
Found the include_in_all in field [c] which is within a multi field.", e.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index 26fc15bf6621b..59c3825acc643 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -165,7 +166,7 @@ public void testMultiFieldsInConsistentOrder() throws Exception { builder = builder.startObject(multiFieldName).field("type", "text").endObject(); } builder = builder.endObject().endObject().endObject().endObject().endObject(); - String mapping = builder.string(); + String mapping = Strings.toString(builder); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); Arrays.sort(multiFieldNames); @@ -183,9 +184,9 @@ public void testMultiFieldsInConsistentOrder() throws Exception { } public void testObjectFieldNotAllowed() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") .field("type", "text").startObject("fields").startObject("multi").field("type", "object").endObject().endObject() - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); final DocumentMapperParser parser = 
createIndex("test").mapperService().documentMapperParser(); try { parser.parse("type", new CompressedXContent(mapping)); @@ -196,9 +197,9 @@ public void testObjectFieldNotAllowed() throws Exception { } public void testNestedFieldNotAllowed() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("my_field") .field("type", "text").startObject("fields").startObject("multi").field("type", "nested").endObject().endObject() - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); final DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); try { parser.parse("type", new CompressedXContent(mapping)); @@ -228,7 +229,7 @@ public void testMultiFieldWithDot() throws IOException { MapperService mapperService = createIndex("test").mapperService(); try { - mapperService.documentMapperParser().parse("my_type", new CompressedXContent(mapping.string())); + mapperService.documentMapperParser().parse("my_type", new CompressedXContent(Strings.toString(mapping))); fail("this should throw an exception because one field contains a dot"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("Field name [raw.foo] which is a multi field of [city] cannot contain '.'")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 8d793efb26f38..e495a6f60d315 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -23,6 +23,8 @@ import java.util.HashSet; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; 
+import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -52,37 +54,37 @@ protected Collection> getPlugins() { } public void testEmptyNested() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .nullField("nested1") - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .nullField("nested1") + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(1)); - doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested").endArray() - .endObject() - .bytes(), + doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested").endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(1)); } public void testSingleNested() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -90,12 +92,12 @@ public void testSingleNested() throws Exception { ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); assertThat(nested1Mapper.nested().isNested(), equalTo(true)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startObject("nested1").field("field1", "1").field("field2", "2").endObject() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startObject("nested1").field("field1", "1").field("field2", "2").endObject() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(2)); @@ -106,15 +108,15 @@ public void testSingleNested() throws Exception { assertThat(doc.docs().get(1).get("field"), equalTo("value")); - doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").field("field2", "2").endObject() - .startObject().field("field1", "3").field("field2", "4").endObject() - .endArray() - .endObject() - .bytes(), + doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + 
.startObject().field("field1", "1").field("field2", "2").endObject() + .startObject().field("field1", "3").field("field2", "4").endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(3)); @@ -129,11 +131,11 @@ public void testSingleNested() throws Exception { } public void testMultiNested() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested") .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -147,15 +149,15 @@ public void testMultiNested() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + 
.startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -181,11 +183,11 @@ public void testMultiNested() throws Exception { } public void testMultiObjectAndNested1() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_parent", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -199,15 +201,15 @@ public void testMultiObjectAndNested1() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() 
- .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -233,11 +235,11 @@ public void testMultiObjectAndNested1() throws Exception { } public void testMultiObjectAndNested2() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").field("include_in_parent", true).startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_parent", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -251,15 +253,15 @@ public void testMultiObjectAndNested2() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", 
"2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -285,11 +287,11 @@ public void testMultiObjectAndNested2() throws Exception { } public void testMultiRootAndNested1() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_root", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -303,15 +305,15 @@ public void testMultiRootAndNested1() throws Exception { assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false)); assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(true)); - ParsedDocument doc = 
docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() - .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").startArray("nested2").startObject().field("field2", "2").endObject().startObject().field("field2", "3").endObject().endArray().endObject() + .startObject().field("field1", "4").startArray("nested2").startObject().field("field2", "5").endObject().startObject().field("field2", "6").endObject().endArray().endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(7)); @@ -342,21 +344,21 @@ public void testMultiRootAndNested1() throws Exception { * lead to duplicate fields on the root document. 
*/ public void testMultipleLevelsIncludeRoot1() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").field("include_in_root", true).field("include_in_parent", true).startObject("properties") .startObject("nested2").field("type", "nested").field("include_in_root", true).field("include_in_parent", true) .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startArray("nested1") - .startObject().startArray("nested2").startObject().field("foo", "bar") - .endObject().endArray().endObject().endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startArray("nested1") + .startObject().startArray("nested2").startObject().field("foo", "bar") + .endObject().endArray().endObject().endArray() + .endObject()), XContentType.JSON)); final Collection fields = doc.rootDoc().getFields(); @@ -371,7 +373,7 @@ public void testMultipleLevelsIncludeRoot1() throws Exception { * {@code false} and {@code include_in_root} set to {@code true}. 
*/ public void testMultipleLevelsIncludeRoot2() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested") .field("include_in_root", true).field("include_in_parent", true).startObject("properties") @@ -380,17 +382,17 @@ public void testMultipleLevelsIncludeRoot2() throws Exception { .startObject("nested3").field("type", "nested") .field("include_in_root", true).field("include_in_parent", true) .endObject().endObject().endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().startArray("nested1") - .startObject().startArray("nested2") - .startObject().startArray("nested3").startObject().field("foo", "bar") - .endObject().endArray().endObject().endArray().endObject().endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject().startArray("nested1") + .startObject().startArray("nested2") + .startObject().startArray("nested3").startObject().field("foo", "bar") + .endObject().endArray().endObject().endArray().endObject().endArray() + .endObject()), XContentType.JSON)); final Collection fields = doc.rootDoc().getFields(); @@ -398,11 +400,11 @@ public void testMultipleLevelsIncludeRoot2() throws Exception { } public void testNestedArrayStrict() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("nested1").field("type", "nested").field("dynamic", "strict").startObject("properties") .startObject("field1").field("type", "text") .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -411,15 +413,15 @@ public void testNestedArrayStrict() throws Exception { assertThat(nested1Mapper.nested().isNested(), equalTo(true)); assertThat(nested1Mapper.dynamic(), equalTo(Dynamic.STRICT)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject().field("field1", "1").endObject() - .startObject().field("field1", "4").endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .startArray("nested1") + .startObject().field("field1", "1").endObject() + .startObject().field("field1", "4").endObject() + .endArray() + .endObject()), XContentType.JSON)); assertThat(doc.docs().size(), equalTo(3)); @@ -433,11 +435,11 @@ public void testNestedArrayStrict() throws Exception { public void testLimitOfNestedFieldsPerIndex() throws Exception { Function mapping = type -> { try { - return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + return Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested") .endObject().endObject().endObject() - 
.endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -466,11 +468,11 @@ public void testLimitOfNestedFieldsPerIndex() throws Exception { public void testLimitOfNestedFieldsWithMultiTypePerIndex() throws Exception { Function mapping = type -> { try { - return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + return Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") .startObject("nested1").field("type", "nested").startObject("properties") .startObject("nested2").field("type", "nested") .endObject().endObject().endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -483,8 +485,8 @@ public void testLimitOfNestedFieldsWithMultiTypePerIndex() throws Exception { // merging same fields, but different type is ok mapperService.merge("type2", new CompressedXContent(mapping.apply("type2")), MergeReason.MAPPING_UPDATE, false); // adding new fields from different type is not ok - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3") - .field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().endObject().string(); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3") + .field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false)); assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded")); 
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java index 8a46f24998db9..815388eeffc55 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java @@ -19,11 +19,11 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -32,38 +32,38 @@ public class NullValueObjectMappingTests extends ESSingleNodeTestCase { public void testNullValueObject() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("obj1").field("type", "object").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("obj1").endObject() - .field("value1", "test1") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("obj1").endObject() + .field("value1", "test1") 
+ .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("value1"), equalTo("test1")); - doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("obj1") - .field("value1", "test1") - .endObject() - .bytes(), + doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("obj1") + .field("value1", "test1") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("value1"), equalTo("test1")); - doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("obj1").field("field", "value").endObject() - .field("value1", "test1") - .endObject() - .bytes(), + doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("obj1").field("field", "value").endObject() + .field("value1", "test1") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().get("obj1.field"), equalTo("value")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java index bc054564a6863..d9502d8e8800c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NullValueTests.java @@ -1,5 +1,6 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; /* @@ -25,7 +26,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; @@ -36,7 
+36,7 @@ public void testNullNullValue() throws Exception { String[] typesToTest = {"integer", "long", "double", "float", "short", "date", "ip", "keyword", "boolean", "byte"}; for (String type : typesToTest) { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startObject("properties") @@ -46,7 +46,7 @@ public void testNullNullValue() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); try { indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index d5b532395ee6e..7c1ff5d95601a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; @@ -48,19 +49,19 @@ protected void setTypeList() { @Override public void doTestDefaults(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = 
mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -76,19 +77,19 @@ public void doTestDefaults(String type) throws Exception { @Override public void doTestNotIndexed(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -99,19 +100,19 @@ public void doTestNotIndexed(String type) throws Exception { @Override public void doTestNoDocValues(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("doc_values", false).endObject().endObject() - 
.endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -123,19 +124,19 @@ public void doTestNoDocValues(String type) throws Exception { @Override public void doTestStore(String type) throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", 123) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -152,19 +153,19 @@ public void doTestStore(String type) throws Exception { @Override public void doTestCoerce(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -175,19 +176,19 @@ public void doTestCoerce(String type) throws IOException { IndexableField dvField = fields[1]; assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("coerce", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper2.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123") - .endObject() - .bytes(), + ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, 
runnable); assertThat(e.getCause().getMessage(), containsString("passed as String")); @@ -195,19 +196,19 @@ public void doTestCoerce(String type) throws IOException { @Override protected void doTestDecimalCoerce(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "7.89") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "7.89") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -222,35 +223,35 @@ public void testIgnoreMalformed() throws Exception { } private void doTestIgnoreMalformed(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "a") - .endObject() - .bytes(), + 
ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), containsString("For input string: \"a\"")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("ignore_malformed", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "a") - .endObject() - .bytes(), + ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -261,13 +262,13 @@ public void testRejectNorms() throws IOException { // not supported as of 5.0 for (String type : TYPES) { DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", type) .field("norms", random().nextBoolean()) .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new 
CompressedXContent(mapping))); assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [norms")); @@ -280,13 +281,13 @@ public void testRejectNorms() throws IOException { public void testRejectIndexOptions() throws IOException { for (String type : TYPES) { DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("foo") .field("type", type) .field("index_options", randomFrom(new String[]{"docs", "freqs", "positions", "offset"})) .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); parser.parse("type", new CompressedXContent(mapping)); assertWarnings( "index_options are deprecated for field [foo] of type [" + type + "] and will be removed in the next major version."); @@ -295,23 +296,23 @@ public void testRejectIndexOptions() throws IOException { @Override protected void doTestNullValue(String type) throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") .field("type", type) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + 
.endObject()), XContentType.JSON)); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); @@ -321,7 +322,7 @@ protected void doTestNullValue(String type) throws IOException { } else { missing = 123L; } - mapping = XContentFactory.jsonBuilder().startObject() + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -329,16 +330,16 @@ protected void doTestNullValue(String type) throws IOException { .field("null_value", missing) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); @@ -355,9 +356,9 @@ protected void doTestNullValue(String type) throws IOException { public void testEmptyName() throws IOException { // after version 5 for (String type : TYPES) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) @@ -425,29 +426,29 @@ private void parseRequest(NumberType type, BytesReference content) throws IOExce } private DocumentMapper 
createDocumentMapper(NumberType type) throws IOException { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", type.typeName()) + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", type.typeName()) + .endObject() .endObject() .endObject() - .endObject() - .endObject() - .string(); + .endObject()); return parser.parse("type", new CompressedXContent(mapping)); } private BytesReference createIndexRequest(Object value) throws IOException { if (value instanceof BigInteger) { - return XContentFactory.jsonBuilder() + return BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .rawField("field", new ByteArrayInputStream(value.toString().getBytes("UTF-8")), XContentType.JSON) - .endObject().bytes(); + .endObject()); } else { - return XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes(); + return BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 0e1bead111452..0df6f7a4fb78d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -19,33 +19,26 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ObjectMapper.Dynamic; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; public class ObjectMapperTests extends ESSingleNodeTestCase { public void testDifferentInnerObjectTokenFailure() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { @@ -68,45 +61,45 @@ public void testDifferentInnerObjectTokenFailure() throws Exception { } public void testEmptyArrayProperties() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("properties").endArray() - .endObject().endObject().string(); + .endObject().endObject()); createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); } public void testEmptyFieldsArrayMultiFields() throws Exception { - String mapping = XContentFactory.jsonBuilder() - 
.startObject() - .startObject("tweet") - .startObject("properties") - .startObject("name") - .field("type", "text") - .startArray("fields") - .endArray() - .endObject() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startObject("name") + .field("type", "text") + .startArray("fields") + .endArray() + .endObject() + .endObject() + .endObject() + .endObject()); createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startObject("name") - .field("type", "text") - .startArray("fields") - .startObject().field("test", "string").endObject() - .startObject().field("test2", "string").endObject() - .endArray() + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startObject("name") + .field("type", "text") + .startArray("fields") + .startObject().field("test", "string").endObject() + .startObject().field("test2", "string").endObject() + .endArray() + .endObject() + .endObject() .endObject() - .endObject() - .endObject() - .endObject() - .string(); + .endObject()); try { createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); @@ -117,32 +110,32 @@ public void testFieldsArrayMultiFieldsShouldThrowException() throws Exception { } public void testEmptyFieldsArray() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startArray("fields") - .endArray() - .endObject() - .endObject() - .endObject() - .string(); + 
String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startArray("fields") + .endArray() + .endObject() + .endObject() + .endObject()); createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testFieldsWithFilledArrayShouldThrowException() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startArray("fields") - .startObject().field("test", "string").endObject() - .startObject().field("test2", "string").endObject() - .endArray() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startArray("fields") + .startObject().field("test", "string").endObject() + .startObject().field("test2", "string").endObject() + .endArray() + .endObject() + .endObject() + .endObject()); try { createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); fail("Expected MapperParsingException"); @@ -152,57 +145,57 @@ public void testFieldsWithFilledArrayShouldThrowException() throws Exception { } public void testFieldPropertiesArray() throws Exception { - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("tweet") - .startObject("properties") - .startObject("name") - .field("type", "text") - .startObject("fields") - .startObject("raw") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("tweet") + .startObject("properties") + .startObject("name") + .field("type", "text") + .startObject("fields") + .startObject("raw") + .field("type", "keyword") + 
.endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject()); createIndex("test").mapperService().documentMapperParser().parse("tweet", new CompressedXContent(mapping)); } public void testMerge() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("foo") .field("type", "keyword") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertNull(mapper.root().includeInAll()); assertNull(mapper.root().dynamic()); - String update = XContentFactory.jsonBuilder().startObject() + String update = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .field("include_in_all", false) .field("dynamic", "strict") - .endObject().endObject().string(); + .endObject().endObject()); mapper = mapperService.merge("type", new CompressedXContent(update), MergeReason.MAPPING_UPDATE, false); assertFalse(mapper.root().includeInAll()); assertEquals(Dynamic.STRICT, mapper.root().dynamic()); } public void testEmptyName() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("") .startObject("properties") .startObject("name") .field("type", "text") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); // Empty name not allowed in index created after 5.0 IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java index d21827ee18cea..c0b284f263fd8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java @@ -21,7 +21,9 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -58,13 +60,13 @@ protected Collection> getPlugins() { } public void testParentSetInDocNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_parent", "1122").endObject().bytes(), XContentType.JSON)); + docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().field("_parent", "1122").endObject()), XContentType.JSON)); fail("Expected failure to parse metadata field"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("Field [_parent] is a metadata field and cannot be added inside a document")); @@ -72,11 +74,11 @@ public void testParentSetInDocNotAllowed() throws Exception { } public void testJoinFieldSet() throws Exception { - 
String parentMapping = XContentFactory.jsonBuilder().startObject().startObject("parent_type") - .endObject().endObject().string(); - String childMapping = XContentFactory.jsonBuilder().startObject().startObject("child_type") + String parentMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("parent_type") + .endObject().endObject()); + String childMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child_type") .startObject("_parent").field("type", "parent_type").endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()); indexService.mapperService().merge("parent_type", new CompressedXContent(parentMapping), MergeReason.MAPPING_UPDATE, false); indexService.mapperService().merge("child_type", new CompressedXContent(childMapping), MergeReason.MAPPING_UPDATE, false); @@ -97,14 +99,14 @@ public void testJoinFieldSet() throws Exception { } public void testJoinFieldNotSet() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("x_field", "x_value") - .endObject() - .bytes(), XContentType.JSON)); + ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("x_field", "x_value") + .endObject()), XContentType.JSON)); assertEquals(0, getNumberOfFieldWithParentPrefix(doc.rootDoc())); } @@ -121,7 
+123,7 @@ public void testNoParentNullFieldCreatedIfNoParentSpecified() throws Exception { .startObject("properties") .endObject() .endObject().endObject(); - mapperService.merge("some_type", new CompressedXContent(mappingSource.string()), MergeReason.MAPPING_UPDATE, false); + mapperService.merge("some_type", new CompressedXContent(Strings.toString(mappingSource)), MergeReason.MAPPING_UPDATE, false); Set allFields = new HashSet<>(mapperService.simpleMatchToIndexNames("*")); assertTrue(allFields.contains("_parent")); assertFalse(allFields.contains("_parent#null")); @@ -140,20 +142,20 @@ private static int getNumberOfFieldWithParentPrefix(ParseContext.Document doc) { } public void testUpdateEagerGlobalOrds() throws IOException { - String parentMapping = XContentFactory.jsonBuilder().startObject().startObject("parent_type") - .endObject().endObject().string(); - String childMapping = XContentFactory.jsonBuilder().startObject().startObject("child_type") + String parentMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("parent_type") + .endObject().endObject()); + String childMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child_type") .startObject("_parent").field("type", "parent_type").endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()); indexService.mapperService().merge("parent_type", new CompressedXContent(parentMapping), MergeReason.MAPPING_UPDATE, false); indexService.mapperService().merge("child_type", new CompressedXContent(childMapping), MergeReason.MAPPING_UPDATE, false); assertTrue(indexService.mapperService().documentMapper("child_type").parentFieldMapper().fieldType().eagerGlobalOrdinals()); - String childMappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("child_type") + String childMappingUpdate = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("child_type") .startObject("_parent").field("type", "parent_type").field("eager_global_ordinals", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); indexService.mapperService().merge("child_type", new CompressedXContent(childMappingUpdate), MergeReason.MAPPING_UPDATE, false); assertFalse(indexService.mapperService().documentMapper("child_type").parentFieldMapper().fieldType().eagerGlobalOrdinals()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java index 4b705f97a31ba..54418850e5d4f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -22,6 +22,8 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.xcontent.ToXContent; @@ -116,16 +118,16 @@ public void doTestDefaults(String type) throws Exception { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -147,16 +149,16 @@ protected void doTestNotIndexed(String type) throws Exception { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -171,16 +173,16 @@ protected void doTestNoDocValues(String type) throws Exception { mapping = mapping.field("format", DATE_FORMAT); } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -197,16 +199,16 @@ protected void doTestStore(String type) throws Exception { mapping = mapping.field("format", DATE_FORMAT); } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -235,17 +237,17 @@ public void doTestCoerce(String type) throws IOException { mapping = mapping.field("format", DATE_FORMAT); } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = 
mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .field(getToField(), getTo(type)) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -260,14 +262,14 @@ public void doTestCoerce(String type) throws IOException { mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field") .field("type", type).field("coerce", false).endObject().endObject().endObject().endObject(); - DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); - assertEquals(mapping.string(), mapper2.mappingSource().toString()); + assertEquals(Strings.toString(mapping), mapper2.mappingSource().toString()); ThrowingRunnable runnable = () -> mapper2 .parse(SourceToParse.source( - "test", "type", "1", XContentFactory.jsonBuilder().startObject().startObject("field") - .field(getFromField(), "5.2").field(getToField(), "10").endObject().endObject().bytes(), + "test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("field") + .field(getFromField(), "5.2").field(getToField(), "10").endObject().endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date"), @@ -281,26 +283,26 @@ protected void doTestDecimalCoerce(String type) throws IOException { .startObject("properties").startObject("field").field("type", type); mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper = 
parser.parse("type", new CompressedXContent(Strings.toString(mapping))); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); - ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(GT_FIELD.getPreferredName(), "2.34") .field(LT_FIELD.getPreferredName(), "5.67") .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); - ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(GT_FIELD.getPreferredName(), "2") .field(LT_FIELD.getPreferredName(), "5") .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields1 = doc1.rootDoc().getFields("field"); @@ -318,17 +320,17 @@ protected void doTestNullValue(String type) throws IOException { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); // test null value for min and max - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .nullField(getFromField()) 
.nullField(getToField()) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertEquals(3, doc.rootDoc().getFields("field").length); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -337,13 +339,13 @@ protected void doTestNullValue(String type) throws IOException { assertThat(storedField.stringValue(), containsString(expected)); // test null max value - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .field(getFromField(), getFrom(type)) .nullField(getToField()) .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); fields = doc.rootDoc().getFields("field"); @@ -365,11 +367,11 @@ protected void doTestNullValue(String type) throws IOException { assertThat(storedField.stringValue(), containsString(strVal)); // test null range - doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .nullField("field") - .endObject() - .bytes(), + doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject()), XContentType.JSON)); assertNull(doc.rootDoc().get("field")); } @@ -388,15 +390,15 @@ public void doTestNoBounds(String type) throws IOException { } mapping = mapping.endObject().endObject().endObject().endObject(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - assertEquals(mapping.string(), mapper.mappingSource().toString()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping))); + assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); // test no bounds specified - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", 
XContentFactory.jsonBuilder() + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") .endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -417,22 +419,22 @@ public void testIllegalArguments() throws Exception { .startObject("properties").startObject("field").field("type", RangeFieldMapper.RangeType.INTEGER.name) .field("format", DATE_FORMAT).endObject().endObject().endObject().endObject(); - ThrowingRunnable runnable = () -> parser.parse("type", new CompressedXContent(mapping.string())); + ThrowingRunnable runnable = () -> parser.parse("type", new CompressedXContent(Strings.toString(mapping))); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, runnable); assertThat(e.getMessage(), containsString("should not define a dateTimeFormatter")); } public void testSerializeDefaults() throws Exception { for (String type : TYPES) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); RangeFieldMapper mapper = (RangeFieldMapper) docMapper.root().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.doXContentBody(builder, true, ToXContent.EMPTY_PARAMS); - String got = builder.endObject().string(); + String got = Strings.toString(builder.endObject()); // if type is date_range we check that the mapper contains the default format and locale // otherwise it should not contain a locale or format diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index 72195fbd954fc..9594a001c9c06 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.network.InetAddresses; @@ -54,14 +55,14 @@ public class RangeFieldQueryStringQueryBuilderTests extends AbstractQueryTestCas @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { - mapperService.merge("_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("_doc", + mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc", INTEGER_RANGE_FIELD_NAME, "type=integer_range", LONG_RANGE_FIELD_NAME, "type=long_range", FLOAT_RANGE_FIELD_NAME, "type=float_range", DOUBLE_RANGE_FIELD_NAME, "type=double_range", DATE_RANGE_FIELD_NAME, "type=date_range", IP_RANGE_FIELD_NAME, "type=ip_range" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index a76d5d01316fb..e17fb9cc4b022 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -29,99 +30,99 @@ public class RootObjectMapperTests extends ESSingleNodeTestCase { public void testNumericDetection() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("numeric_detection", false) .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); // update with a different explicit value - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("numeric_detection", true) .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping2, mapper.mappingSource().toString()); // update with an implicit value: no change - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping2, mapper.mappingSource().toString()); } public void testDateDetection() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = 
Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("date_detection", true) .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); // update with a different explicit value - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("date_detection", false) .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping2, mapper.mappingSource().toString()); // update with an implicit value: no change - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping2, mapper.mappingSource().toString()); } public void testDateFormatters() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_date_formats", Arrays.asList("YYYY-MM-dd")) .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); // no update if formatters are not set explicitly - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = 
Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_date_formats", Arrays.asList()) .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping3, mapper.mappingSource().toString()); } public void testDynamicTemplates() throws Exception { - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startArray("dynamic_templates") @@ -135,26 +136,26 @@ public void testDynamicTemplates() throws Exception { .endObject() .endArray() .endObject() - .endObject().string(); + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); // no update if templates are not set explicitly - String mapping2 = XContentFactory.jsonBuilder() + String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() - .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); - String mapping3 = XContentFactory.jsonBuilder() + String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_templates", Arrays.asList()) .endObject() 
- .endObject().string(); + .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping3, mapper.mappingSource().toString()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java index fb98f42f105eb..1b83b1bcb5b67 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java @@ -19,13 +19,11 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.hamcrest.Matchers.equalTo; @@ -33,15 +31,15 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase { public void testRoutingMapper() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(), + ParsedDocument doc = 
docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "value") + .endObject()), XContentType.JSON).routing("routing_value")); assertThat(doc.rootDoc().get("_routing"), equalTo("routing_value")); @@ -49,12 +47,12 @@ public void testRoutingMapper() throws Exception { } public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { - docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_routing", "foo").endObject().bytes(),XContentType.JSON)); + docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().field("_routing", "foo").endObject()),XContentType.JSON)); fail("Expected failure to parse metadata field"); } catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("Field [_routing] is a metadata field and cannot be added inside a document")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 85017cb35cd39..56db5e3fa9091 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -22,7 +22,9 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -48,39 +50,39 @@ protected Collection> getPlugins() { } public void testNoFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("field", "value") - .endObject().bytes(), + .endObject()), XContentType.JSON)); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON)); documentMapper = parser.parse("type", new CompressedXContent(mapping)); - doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.smileBuilder().startObject() + doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.smileBuilder().startObject() .field("field", "value") - .endObject().bytes(), + .endObject()), XContentType.SMILE)); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE)); } public void testIncludes() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", new String[]{"path1*"}).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() .startObject("path2").field("field2", "value2").endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField sourceField = doc.rootDoc().getField("_source"); @@ -93,16 +95,16 @@ public void testIncludes() throws Exception { } public void testExcludes() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", new String[]{"path1*"}).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() .startObject("path2").field("field2", "value2").endObject() - .endObject().bytes(), + .endObject()), XContentType.JSON)); IndexableField sourceField = doc.rootDoc().getField("_source"); 
@@ -115,9 +117,9 @@ public void testExcludes() throws Exception { } public void testDefaultMappingAndNoMapping() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper mapper = parser.parse("my_type", null, defaultMapping); @@ -143,13 +145,13 @@ public void testDefaultMappingAndNoMapping() throws Exception { } public void testDefaultMappingAndWithMappingOverride() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser() .parse("my_type", new CompressedXContent(mapping), defaultMapping); @@ -158,9 +160,9 @@ public void testDefaultMappingAndWithMappingOverride() throws Exception { } public void testDefaultMappingAndNoMappingWithMapperService() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_6_0).build(); MapperService mapperService = createIndex("test", settings).mapperService(); @@ -172,17 +174,17 @@ public void testDefaultMappingAndNoMappingWithMapperService() throws Exception { } public void testDefaultMappingAndWithMappingOverrideWithMapperService() throws Exception { - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_6_0).build(); MapperService mapperService = createIndex("test", settings).mapperService(); mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() - .endObject().endObject().string(); + .endObject().endObject()); mapperService.merge("my_type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); DocumentMapper mapper = mapperService.documentMapper("my_type"); @@ -210,31 +212,31 @@ void assertConflicts(String mapping1, String mapping2, DocumentMapperParser pars public void testEnabledNotUpdateable() throws Exception { DocumentMapperParser parser = 
createIndex("test").mapperService().documentMapperParser(); // using default of true - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(mapping1, mapping2, parser, "Cannot update enabled setting for [_source]"); // not changing is ok - String mapping3 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping3 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("enabled", true).endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(mapping1, mapping3, parser); } public void testIncludesNotUpdateable() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(defaultMapping, mapping1, parser, "Cannot update includes setting for [_source]"); assertConflicts(mapping1, defaultMapping, parser, "Cannot update 
includes setting for [_source]"); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*", "bar.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(mapping1, mapping2, parser, "Cannot update includes setting for [_source]"); // not changing is ok @@ -243,16 +245,16 @@ public void testIncludesNotUpdateable() throws Exception { public void testExcludesNotUpdateable() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + String defaultMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", "foo.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(defaultMapping, mapping1, parser, "Cannot update excludes setting for [_source]"); assertConflicts(mapping1, defaultMapping, parser, "Cannot update excludes setting for [_source]"); - String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", "foo.*", "bar.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertConflicts(mapping1, mapping2, parser, "Cannot update excludes setting for [_source]"); // not changing is ok @@ -261,27 +263,27 @@ public void testExcludesNotUpdateable() throws Exception { public 
void testComplete() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); assertTrue(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("enabled", false).endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", "foo.*").endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete()); } public void testSourceObjectContainsExtraTokens() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); DocumentMapper documentMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); try { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 438ccd5fa8688..9be8fc53aba64 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -24,15 +24,14 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.RAMDirectory; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService.MergeReason; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import java.util.Collections; @@ -43,41 +42,41 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { public void testBytesAndNumericRepresentation() throws Exception { IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("field1").field("type", "byte").field("store", true).endObject() - .startObject("field2").field("type", "short").field("store", true).endObject() - .startObject("field3").field("type", "integer").field("store", true).endObject() - 
.startObject("field4").field("type", "float").field("store", true).endObject() - .startObject("field5").field("type", "long").field("store", true).endObject() - .startObject("field6").field("type", "double").field("store", true).endObject() - .startObject("field7").field("type", "ip").field("store", true).endObject() - .startObject("field8").field("type", "ip").field("store", true).endObject() - .startObject("field9").field("type", "date").field("store", true).endObject() - .startObject("field10").field("type", "boolean").field("store", true).endObject() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field1").field("type", "byte").field("store", true).endObject() + .startObject("field2").field("type", "short").field("store", true).endObject() + .startObject("field3").field("type", "integer").field("store", true).endObject() + .startObject("field4").field("type", "float").field("store", true).endObject() + .startObject("field5").field("type", "long").field("store", true).endObject() + .startObject("field6").field("type", "double").field("store", true).endObject() + .startObject("field7").field("type", "ip").field("store", true).endObject() + .startObject("field8").field("type", "ip").field("store", true).endObject() + .startObject("field9").field("type", "date").field("store", true).endObject() + .startObject("field10").field("type", "boolean").field("store", true).endObject() + .endObject() + .endObject() + .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", 1) - .field("field2", 1) - .field("field3", 1) - 
.field("field4", 1.1) - .startArray("field5").value(1).value(2).value(3).endArray() - .field("field6", 1.1) - .field("field7", "192.168.1.1") - .field("field8", "2001:db8::2:1") - .field("field9", "2016-04-05") - .field("field10", true) - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", 1) + .field("field2", 1) + .field("field3", 1) + .field("field4", 1.1) + .startArray("field5").value(1).value(2).value(3).endArray() + .field("field6", 1.1) + .field("field7", "192.168.1.1") + .field("field8", "2001:db8::2:1") + .field("field9", "2016-04-05") + .field("field10", true) + .endObject()), XContentType.JSON)); writer.addDocument(doc.rootDoc()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 43d1aab3d133e..9294342075dcf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -28,12 +28,13 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.ToXContent; @@ -81,19 +82,19 @@ protected Collection> getPlugins() { } public void testDefaults() throws IOException { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -113,19 +114,19 @@ public void testDefaults() throws IOException { } public void testEnableStore() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("store", true).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -134,19 
+135,19 @@ public void testEnableStore() throws IOException { } public void testDisableIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("index", false).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -154,22 +155,22 @@ public void testDisableIndex() throws IOException { } public void testDisableNorms() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("norms", false) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + 
.bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "1234") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); @@ -188,7 +189,7 @@ public void testIndexOptions() throws IOException { for (String option : supportedOptions.keySet()) { mappingBuilder.startObject(option).field("type", "text").field("index_options", option).endObject(); } - String mapping = mappingBuilder.endObject().endObject().endObject().string(); + String mapping = Strings.toString(mappingBuilder.endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -196,7 +197,7 @@ public void testIndexOptions() throws IOException { for (String option : supportedOptions.keySet()) { jsonDoc.field(option, "1234"); } - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", jsonDoc.endObject().bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()), XContentType.JSON)); for (Map.Entry entry : supportedOptions.entrySet()) { @@ -209,20 +210,20 @@ public void testIndexOptions() throws IOException { } public void testDefaultPositionIncrementGap() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .array("field", new String[] {"a", "b"}) - .endObject() - .bytes(), + SourceToParse 
sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("field", new String[] {"a", "b"}) + .endObject()), XContentType.JSON); ParsedDocument doc = mapper.parse(sourceToParse); @@ -248,23 +249,23 @@ public void testDefaultPositionIncrementGap() throws IOException { public void testPositionIncrementGap() throws IOException { final int positionIncrementGap = randomIntBetween(1, 1000); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("position_increment_gap", positionIncrementGap) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .array("field", new String[]{"a", "b"}) - .endObject() - .bytes(), + SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("field", new String[]{"a", "b"}) + .endObject()), XContentType.JSON); ParsedDocument doc = mapper.parse(sourceToParse); @@ -289,62 +290,62 @@ public void testPositionIncrementGap() throws IOException { } public void testSearchAnalyzerSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "standard") .field("search_analyzer", "keyword") 
.endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default index analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "default") .field("search_analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "keyword") .field("search_analyzer", "default") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -352,14 +353,14 @@ public void testSearchAnalyzerSerialization() throws IOException { mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); builder.endObject(); - String mappingString = builder.string(); + String mappingString = Strings.toString(builder); assertTrue(mappingString.contains("analyzer")); assertTrue(mappingString.contains("search_analyzer")); assertTrue(mappingString.contains("search_quote_analyzer")); } public void testSearchQuoteAnalyzerSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") @@ -367,13 +368,13 @@ public void testSearchQuoteAnalyzerSerialization() throws IOException { .field("search_analyzer", "standard") .field("search_quote_analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default index/search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "text") @@ -381,14 +382,14 @@ public void testSearchQuoteAnalyzerSerialization() throws 
IOException { .field("search_analyzer", "default") .field("search_quote_analyzer", "keyword") .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); } public void testTermVectors() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") @@ -415,20 +416,20 @@ public void testTermVectors() throws IOException { .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "1234") - .field("field2", "1234") - .field("field3", "1234") - .field("field4", "1234") - .field("field5", "1234") - .field("field6", "1234") - .endObject() - .bytes(), + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", "1234") + .field("field2", "1234") + .field("field3", "1234") + .field("field4", "1234") + .field("field5", "1234") + .field("field6", "1234") + .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false)); @@ -463,12 +464,12 @@ public void testTermVectors() throws IOException { } public void testEagerGlobalOrdinals() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("eager_global_ordinals", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -477,11 +478,11 @@ public void testEagerGlobalOrdinals() throws IOException { } public void testFielddata() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, disabledMapper.mappingSource().toString()); @@ -489,32 +490,32 @@ public void testFielddata() throws IOException { () -> disabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test")); assertThat(e.getMessage(), containsString("Fielddata is disabled")); - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("fielddata", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, enabledMapper.mappingSource().toString()); enabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test"); // no exception this time - String illegalMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String illegalMapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index", false) .field("fielddata", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(illegalMapping))); assertThat(ex.getMessage(), containsString("Cannot enable fielddata on a [text] field that is not indexed")); } public void testFrequencyFilter() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("fielddata", true) @@ -523,7 +524,7 @@ public void testFrequencyFilter() throws IOException { .field("min_segment_size", 1000) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -535,7 +536,7 @@ public void testFrequencyFilter() throws IOException { } public void testNullConfigValuesFail() throws MapperParsingException, IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") @@ -543,19 +544,19 @@ public void testNullConfigValuesFail() throws MapperParsingException, IOExceptio .field("analyzer", (String) null) .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); assertEquals("[analyzer] must not have a [null] value", e.getMessage()); } public void 
testNotIndexedFieldPositionIncrement() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index", false) .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); @@ -564,12 +565,12 @@ public void testNotIndexedFieldPositionIncrement() throws IOException { public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException { for (String indexOptions : Arrays.asList("docs", "freqs")) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index_options", indexOptions) .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); + .endObject().endObject().endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); @@ -578,14 +579,14 @@ public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOExcept } public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("") .field("type", "text") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); // Empty name not allowed in index created after 5.0 IllegalArgumentException 
e = expectThrows(IllegalArgumentException.class, @@ -602,7 +603,7 @@ public void testIndexPrefixMapping() throws IOException { }, null); { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -611,7 +612,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); @@ -624,11 +625,11 @@ public void testIndexPrefixMapping() throws IOException { CONSTANT_SCORE_REWRITE, queryShardContext); assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "Some English text that is going to be very useful") - .endObject() - .bytes(), + ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", "Some English text that is going to be very useful") + .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field._index_prefix"); @@ -636,13 +637,13 @@ public void testIndexPrefixMapping() throws IOException { } { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .startObject("index_prefix").endObject() .endObject().endObject() - .endObject().endObject().string(); + 
.endObject().endObject()); CompressedXContent json = new CompressedXContent(mapping); DocumentMapper mapper = parser.parse("type", json); @@ -661,7 +662,7 @@ public void testIndexPrefixMapping() throws IOException { indexService.mapperService().merge("type", json, MergeReason.MAPPING_UPDATE, true); - String badUpdate = XContentFactory.jsonBuilder().startObject().startObject("type") + String badUpdate = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -670,7 +671,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { indexService.mapperService() @@ -680,7 +681,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String illegalMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -692,7 +693,7 @@ public void testIndexPrefixMapping() throws IOException { .startObject("_index_prefix").field("type", "text").endObject() .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { indexService.mapperService() @@ -703,7 +704,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") 
.field("analyzer", "english") @@ -712,7 +713,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -720,7 +721,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -729,7 +730,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 10) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -737,7 +738,7 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") @@ -746,7 +747,7 @@ public void testIndexPrefixMapping() throws IOException { .field("max_chars", 25) .endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -754,13 +755,13 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("analyzer", "english") .field("index_prefix", (String) null) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); @@ -768,13 +769,13 @@ public void testIndexPrefixMapping() throws IOException { } { - String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") .field("index", "false") .startObject("index_prefix").endObject() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(badConfigMapping)) ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingOnClusterIT.java b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingOnClusterIT.java index 72242a7676baf..24b0ed05ce94f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingOnClusterIT.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingOnClusterIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -79,7 +80,7 @@ 
public void testUpdatingAllSettingsOnOlderIndex() throws Exception { .endObject() .endObject(); String errorMessage = "[_all] enabled is true now encountering false"; - testConflict(mapping.string(), mappingUpdate.string(), Version.V_5_0_0, errorMessage); + testConflict(Strings.toString(mapping), Strings.toString(mappingUpdate), Version.V_5_0_0, errorMessage); } public void testUpdatingAllSettingsOnOlderIndexDisabledToEnabled() throws Exception { @@ -98,7 +99,7 @@ public void testUpdatingAllSettingsOnOlderIndexDisabledToEnabled() throws Except .endObject() .endObject(); String errorMessage = "[_all] enabled is false now encountering true"; - testConflict(mapping.string(), mappingUpdate.string(), Version.V_5_0_0, errorMessage); + testConflict(Strings.toString(mapping), Strings.toString(mappingUpdate), Version.V_5_0_0, errorMessage); } private void compareMappingOnNodes(GetMappingsResponse previousMapping) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java index c6a1eae036ada..5dab88ab487b2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -76,7 +78,7 @@ protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mappi CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping try { - indexService.mapperService().merge("type", new 
CompressedXContent(mappingUpdate.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); + indexService.mapperService().merge("type", new CompressedXContent(BytesReference.bytes(mappingUpdate)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -97,14 +99,14 @@ public void testConflictSameType() throws Exception { .endObject().endObject().endObject(); try { - mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); } try { - mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); @@ -125,7 +127,7 @@ public void testConflictNewType() throws Exception { .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -133,7 +135,7 @@ public void testConflictNewType() throws Exception { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE, 
false); fail(); } catch (IllegalArgumentException e) { // expected @@ -154,15 +156,15 @@ public void testConflictNewTypeUpdate() throws Exception { MapperService mapperService = createIndex("test", Settings.builder().put("index.version.created", Version.V_5_6_0).build()).mapperService(); - mapperService.merge("type1", new CompressedXContent(mapping1.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - mapperService.merge("type2", new CompressedXContent(mapping2.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type1", new CompressedXContent(Strings.toString(mapping1)), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type2", new CompressedXContent(Strings.toString(mapping2)), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder update = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties").startObject("foo").field("type", "double").endObject() .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -170,7 +172,7 @@ public void testConflictNewTypeUpdate() throws Exception { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type2", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -190,14 +192,14 @@ public void testReuseMetaField() throws IOException { MapperService mapperService = createIndex("test", Settings.builder().build()).mapperService(); try { - mapperService.merge("type", new CompressedXContent(mapping.string()), 
MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); } try { - mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); @@ -205,22 +207,22 @@ public void testReuseMetaField() throws IOException { } public void testRejectFieldDefinedTwice() throws IOException { - String mapping1 = XContentFactory.jsonBuilder().startObject() + String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type1") .startObject("properties") .startObject("foo") .field("type", "object") .endObject() .endObject() - .endObject().endObject().string(); - String mapping2 = XContentFactory.jsonBuilder().startObject() + .endObject().endObject()); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type2") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); MapperService mapperService1 = createIndex("test1").mapperService(); mapperService1.merge("type1", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE, false); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index 96d756f008366..99713c140c9e0 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; @@ -121,7 +122,7 @@ protected GetResponse executeGet(GetRequest getRequest) { builder.field(expectedShapePath, indexedShapeToReturn); builder.field(randomAlphaOfLengthBetween(10, 20), "something"); builder.endObject(); - json = builder.string(); + json = Strings.toString(builder); } catch (IOException ex) { throw new ElasticsearchException("boom", ex); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 0fcd395f19673..58c1174235b12 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.Queries; @@ -349,10 +350,10 @@ public void testExceptionUsingAnalyzerOnNumericField() { @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { - mapperService.merge("_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef( + mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef( "_doc", 
"string_boost", "type=text,boost=4", "string_no_pos", - "type=text,index_options=docs").string() + "type=text,index_options=docs")) ), MapperService.MergeReason.MAPPING_UPDATE, false); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 922aa9a682f45..de044d5879312 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -327,7 +328,7 @@ public void testItemCopy() throws IOException { public void testItemFromXContent() throws IOException { Item expectedItem = generateRandomItem(); - String json = expectedItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string(); + String json = Strings.toString(expectedItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); XContentParser parser = createParser(JsonXContent.jsonXContent, json); Item newItem = Item.parse(parser, new Item()); assertEquals(expectedItem, newItem); diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index 31a749161074a..dde97fff86ce7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -25,6 +25,7 @@ import 
org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; @@ -58,7 +59,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertTrue(e.getMessage().contains("spanFirst must have [end] set")); } { @@ -68,7 +69,7 @@ public void testParseEnd() throws IOException { builder.endObject(); builder.endObject(); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertTrue(e.getMessage().contains("spanFirst must have [match] span query clause")); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java index 961d6092d76e5..0536dae6dfa39 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.SpanNotQuery; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.internal.SearchContext; @@ -107,7 +108,7 @@ public void testParseDist() throws IOException { builder.field("dist", 3); builder.endObject(); builder.endObject(); - SpanNotQueryBuilder query = 
(SpanNotQueryBuilder)parseQuery(builder.string()); + SpanNotQueryBuilder query = (SpanNotQueryBuilder)parseQuery(Strings.toString(builder)); assertThat(query.pre(), equalTo(3)); assertThat(query.post(), equalTo(3)); assertNotNull(query.includeQuery()); @@ -128,7 +129,7 @@ public void testParserExceptions() throws IOException { builder.endObject(); builder.endObject(); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertThat(e.getDetailedMessage(), containsString("spanNot must have [include]")); } { @@ -142,7 +143,7 @@ public void testParserExceptions() throws IOException { builder.endObject(); builder.endObject(); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertThat(e.getDetailedMessage(), containsString("spanNot must have [exclude]")); } { @@ -159,7 +160,7 @@ public void testParserExceptions() throws IOException { builder.endObject(); builder.endObject(); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(builder.string())); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(Strings.toString(builder))); assertThat(e.getDetailedMessage(), containsString("spanNot can either use [dist] or [pre] & [post] (or none)")); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index c945e595213fd..baacd13809b22 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.get.GetRequest; import 
org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -191,7 +192,7 @@ public GetResponse executeGet(GetRequest getRequest) { builder.startObject(); builder.array(termsPath, randomTerms.toArray(new Object[randomTerms.size()])); builder.endObject(); - json = builder.string(); + json = Strings.toString(builder); } catch (IOException ex) { throw new ElasticsearchException("boom", ex); } @@ -226,9 +227,9 @@ public void testNumeric() throws IOException { } public void testTermsQueryWithMultipleFields() throws IOException { - String query = XContentFactory.jsonBuilder().startObject() + String query = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("terms").array("foo", 123).array("bar", 456).endObject() - .endObject().string(); + .endObject()); ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query)); assertEquals("[" + TermsQueryBuilder.NAME + "] query does not support multiple fields", e.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 61336028779d9..c84449e18761c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -21,7 +21,6 @@ import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; @@ -38,6 
+37,7 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.MapperService; @@ -74,9 +74,9 @@ protected Collection> getPlugins() { @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { String docType = "_doc"; - mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType, + mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType, "m_s_m", "type=long" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index a2865cfa129b5..40f6605edf11d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; @@ -487,7 +488,7 @@ public void testProperErrorMessageWhenMissingFunction() throws IOException { public void testWeight1fStillProducesWeightFunction() throws IOException { assumeTrue("test runs only when at least a type is 
registered", getCurrentTypes().length > 0); - String queryString = jsonBuilder().startObject() + String queryString = Strings.toString(jsonBuilder().startObject() .startObject("function_score") .startArray("functions") .startObject() @@ -498,7 +499,7 @@ public void testWeight1fStillProducesWeightFunction() throws IOException { .endObject() .endArray() .endObject() - .endObject().string(); + .endObject()); QueryBuilder query = parseQuery(queryString); assertThat(query, instanceOf(FunctionScoreQueryBuilder.class)); FunctionScoreQueryBuilder functionScoreQueryBuilder = (FunctionScoreQueryBuilder) query; @@ -523,20 +524,20 @@ public void testWeight1fStillProducesWeightFunction() throws IOException { } public void testProperErrorMessagesForMisplacedWeightsAndFunctions() throws IOException { - String query = jsonBuilder().startObject().startObject("function_score") + String query = Strings.toString(jsonBuilder().startObject().startObject("function_score") .startArray("functions") .startObject().startObject("script_score").field("script", "3").endObject().endObject() .endArray() .field("weight", 2) - .endObject().endObject().string(); + .endObject().endObject()); expectParsingException(query, "[you can either define [functions] array or a single function, not both. already " + "found [functions] array, now encountering [weight].]"); - query = jsonBuilder().startObject().startObject("function_score") + query = Strings.toString(jsonBuilder().startObject().startObject("function_score") .field("weight", 2) .startArray("functions") .startObject().endObject() .endArray() - .endObject().endObject().string(); + .endObject().endObject()); expectParsingException(query, "[you can either define [functions] array or a single function, not both. 
already found " + "[weight], now encountering [functions].]"); } diff --git a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java index f4d4ea790bc50..109f9cbd686c5 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -68,7 +69,7 @@ public void testXContentRepresentationOfUnlimitedRequestsPerSecond() throws IOEx BulkByScrollTask.Status status = new BulkByScrollTask.Status(null, 0, 0, 0, 0, 0, 0, 0, 0, 0, timeValueMillis(0), Float.POSITIVE_INFINITY, null, timeValueMillis(0)); status.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertThat(builder.string(), containsString("\"requests_per_second\":-1")); + assertThat(Strings.toString(builder), containsString("\"requests_per_second\":-1")); } public void testXContentRepresentationOfUnfinishedSlices() throws IOException { @@ -78,7 +79,7 @@ public void testXContentRepresentationOfUnfinishedSlices() throws IOException { BulkByScrollTask.Status status = new BulkByScrollTask.Status( Arrays.asList(null, null, new BulkByScrollTask.StatusOrException(completedStatus)), null); status.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertThat(builder.string(), containsString("\"slices\":[null,null,{\"slice_id\":2")); + assertThat(Strings.toString(builder), containsString("\"slices\":[null,null,{\"slice_id\":2")); } public void testXContentRepresentationOfSliceFailures() throws IOException { @@ -87,7 +88,7 @@ public void testXContentRepresentationOfSliceFailures() throws IOException { BulkByScrollTask.Status status = new 
BulkByScrollTask.Status(Arrays.asList(null, null, new BulkByScrollTask.StatusOrException(e)), null); status.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertThat(builder.string(), containsString("\"slices\":[null,null,{\"type\":\"exception\"")); + assertThat(Strings.toString(builder), containsString("\"slices\":[null,null,{\"type\":\"exception\"")); } public void testMergeStatuses() { diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java index 15fdbe828b009..4f410dc6d2690 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.search.geo; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -94,7 +95,7 @@ public void testInvalidPointEmbeddedObject() throws IOException { content.endObject(); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); @@ -106,7 +107,7 @@ public void testInvalidPointLatHashMix() throws IOException { content.field("lat", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); 
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); @@ -119,7 +120,7 @@ public void testInvalidPointLonHashMix() throws IOException { content.field("lon", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); @@ -132,7 +133,7 @@ public void testInvalidField() throws IOException { content.field("lon", 0).field("lat", 0).field("test", 0); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); @@ -144,7 +145,7 @@ private XContentParser objectLatLon(double lat, double lon) throws IOException { content.startObject(); content.field("lat", lat).field("lon", lon); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); return parser; } @@ -152,7 +153,7 @@ private XContentParser objectLatLon(double lat, double lon) throws IOException { private XContentParser arrayLatLon(double lat, double lon) throws IOException { XContentBuilder content = JsonXContent.contentBuilder(); content.startArray().value(lon).value(lat).endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); return parser; } @@ -160,7 +161,7 @@ private XContentParser 
arrayLatLon(double lat, double lon) throws IOException { private XContentParser stringLatLon(double lat, double lon) throws IOException { XContentBuilder content = JsonXContent.contentBuilder(); content.value(Double.toString(lat) + ", " + Double.toString(lon)); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); return parser; } @@ -168,7 +169,7 @@ private XContentParser stringLatLon(double lat, double lon) throws IOException { private XContentParser geohash(double lat, double lon) throws IOException { XContentBuilder content = JsonXContent.contentBuilder(); content.value(stringEncode(lon, lat)); - XContentParser parser = createParser(JsonXContent.jsonXContent, content.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); parser.nextToken(); return parser; } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 2afd12a32cc42..5b411e70d61aa 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; @@ -1154,7 +1155,7 @@ public void testShardStats() throws IOException { builder.startObject(); stats.toXContent(builder, EMPTY_PARAMS); builder.endObject(); - String xContent = builder.string(); + String xContent = Strings.toString(builder); StringBuilder expectedSubSequence = new 
StringBuilder("\"shard_path\":{\"state_path\":\""); expectedSubSequence.append(shard.shardPath().getRootStatePath().toString()); expectedSubSequence.append("\",\"data_path\":\""); @@ -2363,12 +2364,12 @@ public void testEstimateTotalDocSize() throws Exception { int numDoc = randomIntBetween(100, 200); for (int i = 0; i < numDoc; i++) { - String doc = XContentFactory.jsonBuilder() + String doc = Strings.toString(XContentFactory.jsonBuilder() .startObject() .field("count", randomInt()) .field("point", randomFloat()) .field("description", randomUnicodeOfCodepointLength(100)) - .endObject().string(); + .endObject()); indexDoc(indexShard, "doc", Integer.toString(i), doc); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 433f662062735..12c3804a1a7b0 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.resync.ResyncReplicationResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -192,7 +193,7 @@ public void testStatusReportsCorrectNumbers() throws IOException { PrimaryReplicaSyncer.ResyncTask.Status status = task.getStatus(); XContentBuilder jsonBuilder = XContentFactory.jsonBuilder(); status.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS); - String jsonString = jsonBuilder.string(); + String jsonString = Strings.toString(jsonBuilder); assertThat(jsonString, containsString("\"phase\":\"" + task.getPhase() + "\"")); assertThat(jsonString, 
containsString("\"totalOperations\":" + task.getTotalOperations())); assertThat(jsonString, containsString("\"resyncedOperations\":" + task.getResyncedOperations())); diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index 3e7f4650c3e6d..2ab905a2dd526 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity; import org.apache.lucene.search.similarities.LambdaTTF; import org.apache.lucene.search.similarities.NormalizationH2; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -66,11 +67,11 @@ public void testResolveDefaultSimilarities() { } public void testResolveSimilaritiesFromMapping_classic() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "classic") @@ -85,11 +86,11 @@ public void testResolveSimilaritiesFromMapping_classic() throws IOException { } public void testResolveSimilaritiesFromMapping_bm25() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") 
.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "BM25") @@ -108,11 +109,11 @@ public void testResolveSimilaritiesFromMapping_bm25() throws IOException { } public void testResolveSimilaritiesFromMapping_boolean() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "boolean").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("foo", Settings.EMPTY); DocumentMapper documentMapper = indexService.mapperService() @@ -123,11 +124,11 @@ public void testResolveSimilaritiesFromMapping_boolean() throws IOException { } public void testResolveSimilaritiesFromMapping_DFR() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "DFR") @@ -148,11 +149,11 @@ public void testResolveSimilaritiesFromMapping_DFR() throws IOException { } public void testResolveSimilaritiesFromMapping_IB() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") 
.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "IB") @@ -173,11 +174,11 @@ public void testResolveSimilaritiesFromMapping_IB() throws IOException { } public void testResolveSimilaritiesFromMapping_DFI() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "DFI") @@ -192,11 +193,11 @@ public void testResolveSimilaritiesFromMapping_DFI() throws IOException { } public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "LMDirichlet") @@ -211,11 +212,11 @@ public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException } public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") 
.startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "LMJelinekMercer") @@ -230,11 +231,11 @@ public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOExcept } public void testResolveSimilaritiesFromMapping_Unknown() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "unknown_similarity").endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); IndexService indexService = createIndex("foo"); try { diff --git a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java index 8a63c237e90d5..626b2b0e0e2b8 100644 --- a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java +++ b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java @@ -57,7 +57,7 @@ public void testToFromXContent() throws IOException { BlobStoreIndexShardSnapshot.FileInfo info = new BlobStoreIndexShardSnapshot.FileInfo("_foobar", meta, size); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); BlobStoreIndexShardSnapshot.FileInfo.toXContent(info, builder, ToXContent.EMPTY_PARAMS); - byte[] xcontent = BytesReference.toBytes(shuffleXContent(builder).bytes()); + byte[] xcontent = BytesReference.toBytes(BytesReference.bytes(shuffleXContent(builder))); final BlobStoreIndexShardSnapshot.FileInfo parsedInfo; try (XContentParser parser = createParser(JsonXContent.jsonXContent, xcontent)) { @@ -116,7 +116,7 @@ 
public void testInvalidFieldsInFromXContent() throws IOException { builder.field(FileInfo.WRITTEN_BY, Version.LATEST.toString()); builder.field(FileInfo.CHECKSUM, "666"); builder.endObject(); - byte[] xContent = BytesReference.toBytes(builder.bytes()); + byte[] xContent = BytesReference.toBytes(BytesReference.bytes(builder)); if (failure == null) { // No failures should read as usual diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 7547dfc513d8e..5407dbb911b69 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -468,7 +469,7 @@ public void testStats() throws IOException { builder.startObject(); copy.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertThat(builder.string(), equalTo("{\"translog\":{\"operations\":4,\"size_in_bytes\":" + expectedSizeInBytes + assertThat(Strings.toString(builder), equalTo("{\"translog\":{\"operations\":4,\"size_in_bytes\":" + expectedSizeInBytes + ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":" + expectedSizeInBytes + ",\"earliest_last_modified_age\":" + stats.getEarliestLastModifiedAge() + "}}")); } diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index 87747990b9db9..5970e1121bdee 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ 
b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -224,12 +225,12 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); responseBuilder.endObject(); - String responseStrings = responseBuilder.string(); + String responseStrings = Strings.toString(responseBuilder); XContentBuilder prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings)); - assertThat(responseStrings, equalTo(prettyJsonBuilder.string())); + assertThat(responseStrings, equalTo(Strings.toString(prettyJsonBuilder))); params.put("pretty", "false"); @@ -238,11 +239,11 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); responseBuilder.endObject(); - responseStrings = responseBuilder.string(); + responseStrings = Strings.toString(responseBuilder); prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings)); - assertThat(responseStrings, not(equalTo(prettyJsonBuilder.string()))); + assertThat(responseStrings, not(equalTo(Strings.toString(prettyJsonBuilder)))); } diff --git a/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 
4ab4cab52cf10..31ee22200a219 100644 --- a/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -81,22 +82,22 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc assertThat("Breaker is not set to 0", node.getBreaker().getStats(CircuitBreaker.FIELDDATA).getEstimated(), equalTo(0L)); } - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("test-str") - .field("type", "keyword") - .field("doc_values", randomBoolean()) - .endObject() // test-str - .startObject("test-num") - // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double" - .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer"))) - .endObject() // test-num - .endObject() // properties - .endObject() // type - .endObject() // {} - .string(); + String mapping = Strings // {} + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("test-str") + .field("type", "keyword") + .field("doc_values", randomBoolean()) + .endObject() // test-str + .startObject("test-num") + // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double" + .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer"))) + .endObject() // test-num + .endObject() 
// properties + .endObject() // type + .endObject()); final double topLevelRate; final double lowLevelRate; if (frequently()) { diff --git a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java index 02191bc22fa69..2a867915b0fe6 100644 --- a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -332,7 +333,7 @@ private void checkIndexState(IndexMetaData.State expectedState, String... indice } public void testOpenCloseWithDocs() throws IOException, ExecutionException, InterruptedException { - String mapping = XContentFactory.jsonBuilder(). + String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject(). startObject("type"). startObject("properties"). @@ -341,7 +342,7 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte .endObject(). endObject(). 
endObject() - .endObject().string(); + .endObject()); assertAcked(client().admin().indices().prepareCreate("test") .addMapping("type", mapping, XContentType.JSON)); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index d27b05d1e7b29..9e97e9bbfd449 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -48,10 +47,8 @@ import java.util.Collection; import java.util.HashMap; import java.util.Map; -import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; @@ -76,7 +73,7 @@ protected Collection> nodePlugins() { } public void testSimulate() throws Exception { - BytesReference pipelineSource = jsonBuilder().startObject() + BytesReference pipelineSource = BytesReference.bytes(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -84,7 +81,7 @@ public void testSimulate() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON) .get(); GetPipelineResponse getResponse = client().admin().cluster().prepareGetPipeline("_id") @@ -93,7 +90,7 @@ public void testSimulate() throws Exception { assertThat(getResponse.pipelines().size(), 
equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); - BytesReference bytes = jsonBuilder().startObject() + BytesReference bytes = BytesReference.bytes(jsonBuilder().startObject() .startArray("docs") .startObject() .field("_index", "index") @@ -105,7 +102,7 @@ public void testSimulate() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); SimulatePipelineResponse response; if (randomBoolean()) { response = client().admin().cluster().prepareSimulatePipeline(bytes, XContentType.JSON) @@ -136,7 +133,7 @@ public void testSimulate() throws Exception { public void testBulkWithIngestFailures() throws Exception { createIndex("index"); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -144,7 +141,7 @@ public void testBulkWithIngestFailures() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); client().admin().cluster().putPipeline(putPipelineRequest).get(); @@ -182,7 +179,7 @@ public void testBulkWithIngestFailures() throws Exception { public void testBulkWithUpsert() throws Exception { createIndex("index"); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -190,7 +187,7 @@ public void testBulkWithUpsert() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); client().admin().cluster().putPipeline(putPipelineRequest).get(); @@ -217,7 +214,7 @@ public void testBulkWithUpsert() throws Exception { } public void 
test() throws Exception { - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -225,7 +222,7 @@ public void test() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); client().admin().cluster().putPipeline(putPipelineRequest).get(); @@ -258,7 +255,7 @@ public void test() throws Exception { } public void testPutWithPipelineFactoryError() throws Exception { - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -267,7 +264,7 @@ public void testPutWithPipelineFactoryError() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id2", source, XContentType.JSON); Exception e = expectThrows(ElasticsearchParseException.class, () -> client().admin().cluster().putPipeline(putPipelineRequest).actionGet()); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java index 03777b98ab73e..84d9327a0910a 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java @@ -45,14 +45,14 @@ public class IngestProcessorNotInstalledOnAllNodesIT extends ESIntegTestCase { private volatile boolean installPlugin; public IngestProcessorNotInstalledOnAllNodesIT() throws IOException { - pipelineSource = jsonBuilder().startObject() + pipelineSource = 
BytesReference.bytes(jsonBuilder().startObject() .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); } @Override diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java index eb1b7814ab851..7b134879cda45 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java @@ -58,7 +58,7 @@ public void testParser() throws IOException { try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { new PipelineConfiguration("1", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON) .toXContent(builder, ToXContent.EMPTY_PARAMS); - bytes = builder.bytes(); + bytes = BytesReference.bytes(builder); } XContentParser xContentParser = xContentType.xContent() diff --git a/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java b/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java index 3c30e7610dd26..07dcabf396b59 100644 --- a/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java +++ b/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java @@ -155,11 +155,11 @@ public void testThatParentPerDocumentIsSupported() throws Exception { @SuppressWarnings("unchecked") public void testThatSourceFilteringIsSupported() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); - BytesReference sourceBytesRef = jsonBuilder().startObject() + BytesReference sourceBytesRef = BytesReference.bytes(jsonBuilder().startObject() .array("field", "1", "2") .startObject("included").field("field", "should be seen").field("hidden_field", "should not be seen").endObject() .field("excluded", "should not be seen") - .endObject().bytes(); + .endObject()); for (int i = 0; i < 100; i++) { 
client().prepareIndex("test", "type", Integer.toString(i)).setSource(sourceBytesRef, XContentType.JSON).get(); } diff --git a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 4da927459e55a..107ac38400e0d 100644 --- a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; @@ -101,7 +102,7 @@ private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOEx param2Builder.startObject(); param2.toXContent(param2Builder, params); param2Builder.endObject(); - assertThat(param1Builder.string(), equalTo(param2Builder.string())); + assertThat(Strings.toString(param1Builder), equalTo(Strings.toString(param2Builder))); } private static NodeInfo createNodeInfo() { diff --git a/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java b/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java index 51167d862fdfd..cd902bd438fe4 100644 --- a/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.repositories; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -68,7 
+69,7 @@ public void testXContent() throws IOException { IndexId indexId = new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID()); XContentBuilder builder = JsonXContent.contentBuilder(); indexId.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); String name = null; String id = null; diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index 40ff1bad9767f..8c1e242b3262f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.repositories; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -57,7 +58,7 @@ public void testXContent() throws IOException { RepositoryData repositoryData = generateRandomRepoData(); XContentBuilder builder = JsonXContent.contentBuilder(); repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); long gen = (long) randomIntBetween(0, 500); RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen); assertEquals(repositoryData, fromXContent); diff --git a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java 
index c658f06637ea0..96106125f19ef 100644 --- a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -338,7 +338,7 @@ public void testNoErrorFromXContent() throws IOException { builder.field("status", randomFrom(RestStatus.values()).getStatus()); builder.endObject(); - try (XContentParser parser = createParser(builder.contentType().xContent(), builder.bytes())) { + try (XContentParser parser = createParser(builder.contentType().xContent(), BytesReference.bytes(builder))) { BytesRestResponse.errorFromXContent(parser); } } diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 08cab9ea2e92b..cb2d51f6a675e 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -412,7 +411,8 @@ public boolean supportsContentStream() { public void testNonStreamingXContentCausesErrorResponse() throws IOException { FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) - .withContent(YamlXContent.contentBuilder().startObject().endObject().bytes(), XContentType.YAML).withPath("/foo").build(); + .withContent(BytesReference.bytes(YamlXContent.contentBuilder().startObject().endObject()), + XContentType.YAML).withPath("/foo").build(); AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); 
restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { @Override diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java index 13e6de063e81e..d29bf1a82cb1a 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java @@ -96,7 +96,7 @@ public void testGetResponse() throws Exception { responseBuilder.prettyPrint().lfAtEnd(); } mainResponse.toXContent(responseBuilder, ToXContent.EMPTY_PARAMS); - BytesReference xcontentBytes = responseBuilder.bytes(); + BytesReference xcontentBytes = BytesReference.bytes(responseBuilder); assertEquals(xcontentBytes, response.content()); } } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java index 801ed758cb228..d5769cd192b75 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -39,7 +40,8 @@ public void testFromXContentLoading() throws Exception { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject().field("lang0#id0", "script0").field("lang1#id0", "script1").endObject(); XContentParser parser0 = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput()); + .createParser(NamedXContentRegistry.EMPTY, 
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); expectThrows(IllegalArgumentException.class, () -> ScriptMetaData.fromXContent(parser0)); // failure to load a new namespace script and old namespace script with the same id but different langs @@ -47,7 +49,8 @@ public void testFromXContentLoading() throws Exception { builder.startObject().field("lang0#id0", "script0") .startObject("id0").field("lang", "lang1").field("source", "script1").endObject().endObject(); XContentParser parser1 = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput()); + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); expectThrows(IllegalArgumentException.class, () -> ScriptMetaData.fromXContent(parser1)); // failure to load a new namespace script and old namespace script with the same id but different langs with additional scripts @@ -56,7 +59,8 @@ public void testFromXContentLoading() throws Exception { .startObject("id1").field("lang", "lang0").field("source", "script0").endObject() .startObject("id0").field("lang", "lang1").field("source", "script1").endObject().endObject(); XContentParser parser2 = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput()); + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); expectThrows(IllegalArgumentException.class, () -> ScriptMetaData.fromXContent(parser2)); // okay to load the same script from the new and old namespace if the lang is the same @@ -64,7 +68,8 @@ public void testFromXContentLoading() throws Exception { builder.startObject().field("lang0#id0", "script0") .startObject("id0").field("lang", "lang0").field("source", 
"script1").endObject().endObject(); XContentParser parser3 = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes().streamInput()); + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput()); ScriptMetaData.fromXContent(parser3); } @@ -73,15 +78,15 @@ public void testGetScript() throws Exception { XContentBuilder sourceBuilder = XContentFactory.jsonBuilder(); sourceBuilder.startObject().startObject("template").field("field", "value").endObject().endObject(); - builder.storeScript("template", StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType())); + builder.storeScript("template", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType())); sourceBuilder = XContentFactory.jsonBuilder(); sourceBuilder.startObject().field("template", "value").endObject(); - builder.storeScript("template_field", StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType())); + builder.storeScript("template_field", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType())); sourceBuilder = XContentFactory.jsonBuilder(); sourceBuilder.startObject().startObject("script").field("lang", "_lang").field("source", "_source").endObject().endObject(); - builder.storeScript("script", StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType())); + builder.storeScript("script", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType())); ScriptMetaData scriptMetaData = builder.build(); assertEquals("_source", scriptMetaData.getStoredScript("script").getSource()); @@ -134,7 +139,7 @@ private ScriptMetaData randomScriptMetaData(XContentType sourceContentType, int .field("lang", randomAlphaOfLength(4)).field("source", randomAlphaOfLength(10)) .endObject().endObject(); 
builder.storeScript(randomAlphaOfLength(i + 1), - StoredScriptSource.parse(sourceBuilder.bytes(), sourceBuilder.contentType())); + StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType())); } return builder.build(); } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 42a4c2f6abb1a..fb140462086b2 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -263,14 +263,14 @@ public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException { } public void testStoreScript() throws Exception { - BytesReference script = XContentFactory.jsonBuilder() + BytesReference script = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("script") .startObject() .field("lang", "_lang") .field("source", "abc") .endObject() - .endObject().bytes(); + .endObject()); ScriptMetaData scriptMetaData = ScriptMetaData.putStoredScript(null, "_id", StoredScriptSource.parse(script, XContentType.JSON)); assertNotNull(scriptMetaData); assertEquals("abc", scriptMetaData.getStoredScript("_id").getSource()); diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTests.java index 0459be255e57f..6e578ed910d40 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.script; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.settings.Settings; @@ -70,7 +71,7 @@ private Script createScript() throws IOException { builder.startObject(); builder.field("field", 
randomAlphaOfLengthBetween(1, 5)); builder.endObject(); - script = builder.string(); + script = Strings.toString(builder); } } else { script = randomAlphaOfLengthBetween(1, 5); diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java index 9174943e48b06..168ec4fc553b9 100644 --- a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.script; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -48,7 +50,7 @@ protected StoredScriptSource createTestInstance() { if (randomBoolean()) { options.put(Script.CONTENT_TYPE_OPTION, xContentType.mediaType()); } - return StoredScriptSource.parse(template.bytes(), xContentType); + return StoredScriptSource.parse(BytesReference.bytes(template), xContentType); } catch (IOException e) { throw new AssertionError("Failed to create test instance", e); } @@ -82,7 +84,7 @@ protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws switch (between(0, 3)) { case 0: - source = newTemplate.string(); + source = Strings.toString(newTemplate); break; case 1: lang = randomAlphaOfLengthBetween(1, 20); @@ -93,7 +95,7 @@ protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws break; case 3: default: - return new StoredScriptSource(newTemplate.string()); + return new StoredScriptSource(Strings.toString(newTemplate)); } return new StoredScriptSource(lang, source, options); } diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java index 
f66f4b68b55b1..2bf0216c546ec 100644 --- a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.script; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -57,7 +59,7 @@ public void testSourceParsing() throws Exception { try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { builder.startObject().startObject("script").field("lang", "lang").field("source", "code").endObject().endObject(); - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap()); assertThat(parsed, equalTo(source)); @@ -67,7 +69,7 @@ public void testSourceParsing() throws Exception { try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { builder.startObject().field("template", "code").endObject(); - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("mustache", "code", Collections.emptyMap()); assertThat(parsed, equalTo(source)); @@ -79,10 +81,10 @@ public void testSourceParsing() throws Exception { String code; try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { - code = cb.startObject().field("query", "code").endObject().string(); + code = Strings.toString(cb.startObject().field("query", 
"code").endObject()); } - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap()); assertThat(parsed, equalTo(source)); @@ -94,10 +96,10 @@ public void testSourceParsing() throws Exception { String code; try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { - code = cb.startObject().field("query", "code").endObject().string(); + code = Strings.toString(cb.startObject().field("query", "code").endObject()); } - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap()); assertThat(parsed, equalTo(source)); @@ -111,10 +113,10 @@ public void testSourceParsing() throws Exception { String code; try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { - code = cb.startObject().field("query", "code").endObject().string(); + code = Strings.toString(cb.startObject().field("query", "code").endObject()); } - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.singletonMap("content_type", "application/json; charset=UTF-8")); @@ -125,7 +127,7 @@ public void testSourceParsing() throws Exception { try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "lang").field("source", "code").endObject().endObject(); - StoredScriptSource parsed = 
StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap()); assertThat(parsed, equalTo(source)); @@ -135,7 +137,7 @@ public void testSourceParsing() throws Exception { try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "lang").field("code", "code").endObject().endObject(); - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap()); assertThat(parsed, equalTo(source)); @@ -147,7 +149,7 @@ public void testSourceParsing() throws Exception { builder.startObject().field("script").startObject().field("lang", "lang").field("source", "code") .field("options").startObject().endObject().endObject().endObject(); - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("lang", "code", Collections.emptyMap()); assertThat(parsed, equalTo(source)); @@ -155,15 +157,16 @@ public void testSourceParsing() throws Exception { // complex script with embedded template try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { - builder.startObject().field("script").startObject().field("lang", "lang").startObject("source").field("query", "code") - .endObject().startObject("options").endObject().endObject().endObject().string(); + Strings.toString(builder.startObject().field("script").startObject().field("lang", "lang") + 
.startObject("source").field("query", "code") + .endObject().startObject("options").endObject().endObject().endObject()); String code; try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { - code = cb.startObject().field("query", "code").endObject().string(); + code = Strings.toString(cb.startObject().field("query", "code").endObject()); } - StoredScriptSource parsed = StoredScriptSource.parse(builder.bytes(), XContentType.JSON); + StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); StoredScriptSource source = new StoredScriptSource("lang", code, Collections.singletonMap(Script.CONTENT_TYPE_OPTION, builder.contentType().mediaType())); @@ -177,7 +180,7 @@ public void testSourceParsingErrors() throws Exception { builder.startObject().field("script").startObject().field("source", "code").endObject().endObject(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> - StoredScriptSource.parse(builder.bytes(), XContentType.JSON)); + StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON)); assertThat(iae.getMessage(), equalTo("must specify lang for stored script")); } @@ -186,7 +189,7 @@ public void testSourceParsingErrors() throws Exception { builder.startObject().field("script").startObject().field("lang", "lang").endObject().endObject(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> - StoredScriptSource.parse(builder.bytes(), XContentType.JSON)); + StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON)); assertThat(iae.getMessage(), equalTo("must specify source for stored script")); } @@ -196,7 +199,7 @@ public void testSourceParsingErrors() throws Exception { .startObject("options").field("option", "option").endObject().endObject().endObject(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> - StoredScriptSource.parse(builder.bytes(), 
XContentType.JSON)); + StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON)); assertThat(iae.getMessage(), equalTo("illegal compiler options [{option=option}] specified")); } } diff --git a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index 3e5943951aec2..b0eb9e907618f 100644 --- a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent; @@ -76,7 +77,7 @@ public void testToXContent() throws IOException { " \"field\" : \"foo\",\n" + " \"offset\" : 5\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); nestedIdentity = new NestedIdentity("foo", 5, new NestedIdentity("bar", 3, null)); builder = JsonXContent.contentBuilder(); @@ -94,7 +95,7 @@ public void testToXContent() throws IOException { " \"offset\" : 3\n" + " }\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } /** diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 0649a68be758c..7ed1c006d0bb5 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; @@ -204,7 +205,7 @@ public void testToXContent() throws 
IOException { searchHit.score(1.5f); XContentBuilder builder = JsonXContent.contentBuilder(); searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", builder.string()); + assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder)); } public void testSerializeShardTarget() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index decfe804a4284..075d5bc2aa3df 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.search; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; @@ -112,7 +113,7 @@ public void testToXContent() throws IOException { builder.endObject(); assertEquals("{\"hits\":{\"total\":1000,\"max_score\":1.5," + "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":\"-Infinity\"},"+ - "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":\"-Infinity\"}]}}", builder.string()); + "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":\"-Infinity\"}]}}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index 316ef3d455984..d1a9a15a3937c 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -88,7 +89,7 @@ public void testToXContent() throws IOException { builder.startObject(); sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"sort\":[1,\"foo\",3.0]}", builder.string()); + assertEquals("{\"sort\":[1,\"foo\",3.0]}", Strings.toString(builder)); } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 29c187f59a88a..29d8e327d5cd7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -252,7 +252,7 @@ public void testParsingExceptionOnUnknownAggregation() throws IOException { builder.endObject(); } builder.endObject(); - BytesReference originalBytes = builder.bytes(); + BytesReference originalBytes = BytesReference.bytes(builder); try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); ParsingException ex = expectThrows(ParsingException.class, () -> Aggregations.fromXContent(parser)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 884e732c39107..642092507fed9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -260,7 +260,7 @@ public void testRewrite() throws Exception { builder.endObject(); } builder.endObject(); - bytesReference = builder.bytes(); + bytesReference = BytesReference.bytes(builder); } FilterAggregationBuilder filterAggBuilder 
= new FilterAggregationBuilder("titles", new WrapperQueryBuilder(bytesReference)); BucketScriptPipelineAggregationBuilder pipelineAgg = new BucketScriptPipelineAggregationBuilder("const", new Script("1")); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index ce5e4a694f279..679941437f029 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -56,7 +56,7 @@ public void testWrapperQueryIsRewritten() throws IOException { builder.endObject(); } builder.endObject(); - bytesReference = builder.bytes(); + bytesReference = BytesReference.bytes(builder); } FiltersAggregationBuilder builder = new FiltersAggregationBuilder("titles", new FiltersAggregator.KeyedFilter("titleterms", new WrapperQueryBuilder(bytesReference))); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index db5a0a1cd8ec2..8b00c42311add 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; @@ -1220,7 +1221,7 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { } public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { - String 
mappingJson = jsonBuilder().startObject().startObject("type").startObject("properties").startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy").endObject().endObject().endObject().endObject().string(); + String mappingJson = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties").startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy").endObject().endObject().endObject().endObject()); prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).execute().actionGet(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 737cd3513001e..79d0a0ad17e69 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -334,7 +335,7 @@ public void testXContentResponse() throws Exception { + "\"score\":0.75," + "\"bg_count\":4" + "}]}}]}}"; - assertThat(responseBuilder.string(), equalTo(result)); + assertThat(Strings.toString(responseBuilder), equalTo(result)); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java index dddfee7d094d2..86ddd4843a75b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java @@ -162,7 +162,7 @@ public void testXContentRoundTrip() throws Exception { orig.toXContent(out, ToXContent.EMPTY_PARAMS); out.endObject(); - try (XContentParser in = createParser(JsonXContent.jsonXContent, out.bytes())) { + try (XContentParser in = createParser(JsonXContent.jsonXContent, BytesReference.bytes(out))) { XContentParser.Token token = in.currentToken(); assertNull(token); @@ -176,7 +176,7 @@ public void testXContentRoundTrip() throws Exception { ExtendedBounds read = ExtendedBounds.PARSER.apply(in, null); assertEquals(orig, read); } catch (Exception e) { - throw new Exception("Error parsing [" + out.bytes().utf8ToString() + "]", e); + throw new Exception("Error parsing [" + BytesReference.bytes(out).utf8ToString() + "]", e); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 796355ebfb190..b8c9825d9b5a5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -280,7 +281,7 @@ protected SignificanceHeuristic parseFromBuilder(ParseFieldRegistry> getMockPlugins() { } public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException { - String mapping = XContentFactory.jsonBuilder(). + String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject(). startObject("type"). startObject("properties"). @@ -80,7 +81,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe .endObject(). endObject(). endObject() - .endObject().string(); + .endObject()); final double lowLevelRate; final double topLevelRate; if (frequently()) { diff --git a/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index 835b980d6653e..931f940658893 100644 --- a/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Requests; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; @@ -54,7 +55,7 @@ protected Collection> nodePlugins() { } public void testRandomDirectoryIOExceptions() throws IOException, InterruptedException, ExecutionException { - String mapping = XContentFactory.jsonBuilder(). + String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject(). startObject("type"). startObject("properties"). @@ -63,7 +64,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc .endObject(). endObject(). 
endObject() - .endObject().string(); + .endObject()); final double exceptionRate; final double exceptionOnOpenRate; if (frequently()) { diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index a72c78f79d2d9..66d6f68b8a4aa 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -345,7 +345,7 @@ public void testToXContent() throws IOException { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); XContentBuilder builder = XContentFactory.contentBuilder(xContentType); searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - BytesReference source = builder.bytes(); + BytesReference source = BytesReference.bytes(builder); Map sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); assertEquals(0, sourceAsMap.size()); } @@ -354,7 +354,7 @@ public void testToXContent() throws IOException { searchSourceBuilder.query(RandomQueryBuilder.createQuery(random())); XContentBuilder builder = XContentFactory.contentBuilder(xContentType); searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - BytesReference source = builder.bytes(); + BytesReference source = BytesReference.bytes(builder); Map sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); assertEquals(1, sourceAsMap.size()); assertEquals("query", sourceAsMap.keySet().iterator().next()); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java index 800b863138753..5cc4e2ddc68a7 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourceSubPhaseTests.java @@ -103,7 +103,8 @@ private FetchSubPhase.HitContext hitExecute(XContentBuilder source, boolean fetc private FetchSubPhase.HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource, String[] includes, String[] excludes) { FetchSourceContext fetchSourceContext = new FetchSourceContext(fetchSource, includes, excludes); - SearchContext searchContext = new FetchSourceSubPhaseTestSearchContext(fetchSourceContext, source == null ? null : source.bytes()); + SearchContext searchContext = new FetchSourceSubPhaseTestSearchContext(fetchSourceContext, + source == null ? null : BytesReference.bytes(source)); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); hitContext.reset(new SearchHit(1, null, null, null), null, 1, null); FetchSourceSubPhase phase = new FetchSourceSubPhase(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java index b4bf2950b7d07..7b27cf78ec65a 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.text.Text; @@ -86,7 +87,7 @@ public void testToXContent() throws IOException { " \"bar\",\n" + " \"baz\"\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); field = new HighlightField("foo", null); builder = JsonXContent.contentBuilder(); @@ -97,7 +98,7 @@ public void testToXContent() throws IOException { assertEquals( "{\n" + " \"foo\" : null\n" + 
- "}", builder.string()); + "}", Strings.toString(builder)); } /** diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index cc23deda2d856..7f61655a09273 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; @@ -2724,7 +2725,7 @@ public void testKeywordFieldHighlighting() throws IOException { } public void testACopyFieldWithNestedQuery() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo") .field("type", "nested") .startObject("properties") @@ -2739,7 +2740,7 @@ public void testACopyFieldWithNestedQuery() throws Exception { .field("term_vector", "with_positions_offsets") .field("store", true) .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().startArray("foo") @@ -2872,7 +2873,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { } public void testWithNestedQuery() throws Exception { - String mapping = 
jsonBuilder().startObject().startObject("type").startObject("properties") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("text") .field("type", "text") .field("index_options", "offsets") @@ -2886,7 +2887,7 @@ public void testWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject() diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 71db66c7fb208..d7480c2b6fb2e 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.fields; -import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; @@ -160,12 +160,12 @@ static Object docScript(Map vars, String fieldName) { public void testStoredFields() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() .startObject("field2").field("type", "text").field("store", false).endObject() 
.startObject("field3").field("type", "text").field("store", true).endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); @@ -253,9 +253,9 @@ public void testStoredFields() throws Exception { public void testScriptDocAndFields() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("num1").field("type", "double").field("store", true).endObject() - .endObject().endObject().endObject().string(); + .endObject().endObject().endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); @@ -514,53 +514,53 @@ public void testPartialFields() throws Exception { public void testStoredFieldsWithoutSource() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_source") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("byte_field") - .field("type", "byte") - .field("store", true) - .endObject() - .startObject("short_field") - .field("type", "short") - .field("store", true) - .endObject() - .startObject("integer_field") - .field("type", "integer") - .field("store", true) - .endObject() - .startObject("long_field") - .field("type", "long") - .field("store", true) - .endObject() - .startObject("float_field") - .field("type", "float") - .field("store", true) - .endObject() - .startObject("double_field") - .field("type", "double") - .field("store", true) - .endObject() - .startObject("date_field") - .field("type", "date") - .field("store", true) - .endObject() - 
.startObject("boolean_field") - .field("type", "boolean") - .field("store", true) - .endObject() - .startObject("binary_field") - .field("type", "binary") - .field("store", true) + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("byte_field") + .field("type", "byte") + .field("store", true) + .endObject() + .startObject("short_field") + .field("type", "short") + .field("store", true) + .endObject() + .startObject("integer_field") + .field("type", "integer") + .field("store", true) + .endObject() + .startObject("long_field") + .field("type", "long") + .field("store", true) + .endObject() + .startObject("float_field") + .field("type", "float") + .field("store", true) + .endObject() + .startObject("double_field") + .field("type", "double") + .field("store", true) + .endObject() + .startObject("date_field") + .field("type", "date") + .field("store", true) + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .field("store", true) + .endObject() + .startObject("binary_field") + .field("type", "binary") + .field("store", true) + .endObject() + .endObject() .endObject() - .endObject() - .endObject() - .endObject() - .string(); + .endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); @@ -670,7 +670,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject()) .get(); - BytesReference source = jsonBuilder().startObject() + BytesReference source = BytesReference.bytes(jsonBuilder().startObject() .startArray("field1") .startObject() .startObject("field2") @@ -691,7 +691,7 @@ public void testGetFieldsComplexField() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); client().prepareIndex("my-index", "doc", 
"1").setRefreshPolicy(IMMEDIATE).setSource(source, XContentType.JSON).get(); @@ -722,54 +722,54 @@ public void testSingleValueFieldDatatField() throws ExecutionException, Interrup public void testFieldsPulledFromFieldData() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_source") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("text_field") - .field("type", "text") - .field("fielddata", true) - .endObject() - .startObject("keyword_field") - .field("type", "keyword") - .endObject() - .startObject("byte_field") - .field("type", "byte") - .endObject() - .startObject("short_field") - .field("type", "short") - .endObject() - .startObject("integer_field") - .field("type", "integer") - .endObject() - .startObject("long_field") - .field("type", "long") - .endObject() - .startObject("float_field") - .field("type", "float") - .endObject() - .startObject("double_field") - .field("type", "double") - .endObject() - .startObject("date_field") - .field("type", "date") - .endObject() - .startObject("boolean_field") - .field("type", "boolean") - .endObject() - .startObject("binary_field") - .field("type", "binary") - .endObject() - .startObject("ip_field") - .field("type", "ip") + String mapping = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("text_field") + .field("type", "text") + .field("fielddata", true) + .endObject() + .startObject("keyword_field") + .field("type", "keyword") + .endObject() + .startObject("byte_field") + .field("type", "byte") + .endObject() + .startObject("short_field") + .field("type", "short") + .endObject() + .startObject("integer_field") + .field("type", "integer") + .endObject() + .startObject("long_field") + .field("type", "long") + .endObject() + 
.startObject("float_field") + .field("type", "float") + .endObject() + .startObject("double_field") + .field("type", "double") + .endObject() + .startObject("date_field") + .field("type", "date") + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .endObject() + .startObject("binary_field") + .field("type", "binary") + .endObject() + .startObject("ip_field") + .field("type", "ip") + .endObject() + .endObject() .endObject() - .endObject() - .endObject() - .endObject() - .string(); + .endObject()); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 123109cb08c01..0038ef368c150 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; @@ -202,7 +203,7 @@ public void testShapeRelations() throws Exception { assertTrue("Disjoint relation is not supported", disjointSupport); assertTrue("within relation is not supported", withinSupport); - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("polygon") .startObject("properties") @@ -212,7 +213,7 @@ public void testShapeRelations() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes") 
.addMapping("polygon", mapping, XContentType.JSON); @@ -229,7 +230,7 @@ public void testShapeRelations() throws Exception { .coordinate(-5, -5).coordinate(-5, 5).coordinate(5, 5).coordinate(5, -5).close()))) .polygon(new PolygonBuilder( new CoordinatesBuilder().coordinate(-4, -4).coordinate(-4, 4).coordinate(4, 4).coordinate(4, -4).close())); - BytesReference data = jsonBuilder().startObject().field("area", polygon).endObject().bytes(); + BytesReference data = BytesReference.bytes(jsonBuilder().startObject().field("area", polygon).endObject()); client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); @@ -292,7 +293,7 @@ public void testShapeRelations() throws Exception { .hole(new LineStringBuilder( new CoordinatesBuilder().coordinate(-4, -4).coordinate(-4, 4).coordinate(4, 4).coordinate(4, -4).close())); - data = jsonBuilder().startObject().field("area", inverse).endObject().bytes(); + data = BytesReference.bytes(jsonBuilder().startObject().field("area", inverse).endObject()); client().prepareIndex("shapes", "polygon", "2").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); @@ -326,7 +327,7 @@ public void testShapeRelations() throws Exception { builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close()); - data = jsonBuilder().startObject().field("area", builder).endObject().bytes(); + data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); @@ -335,7 +336,7 @@ public void testShapeRelations() throws Exception { .coordinate(170, -10).coordinate(190, -10).coordinate(190, 
10).coordinate(170, 10).close()) .hole(new LineStringBuilder(new CoordinatesBuilder().coordinate(175, -5).coordinate(185, -5).coordinate(185, 5).coordinate(175, 5).close())); - data = jsonBuilder().startObject().field("area", builder).endObject().bytes(); + data = BytesReference.bytes(jsonBuilder().startObject().field("area", builder).endObject()); client().prepareIndex("shapes", "polygon", "1").setSource(data, XContentType.JSON).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java index e848b70179343..a8f559ce35e4c 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -42,22 +43,22 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase { */ public void testOrientationPersistence() throws Exception { String idxName = "orientation"; - String mapping = XContentFactory.jsonBuilder().startObject().startObject("shape") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("orientation", "left") - .endObject().endObject() - .endObject().endObject().string(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("orientation", "left") + .endObject().endObject() + 
.endObject().endObject()); // create index assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON)); - mapping = XContentFactory.jsonBuilder().startObject().startObject("shape") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("orientation", "right") - .endObject().endObject() - .endObject().endObject().string(); + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("orientation", "right") + .endObject().endObject() + .endObject().endObject()); assertAcked(prepareCreate(idxName+"2").addMapping("shape", mapping, XContentType.JSON)); ensureGreen(idxName, idxName+"2"); @@ -100,7 +101,7 @@ public void testIgnoreMalformed() throws Exception { ensureGreen(); // test self crossing ccw poly not crossing dateline - String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() @@ -112,7 +113,7 @@ public void testIgnoreMalformed() throws Exception { .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() - .endObject().string(); + .endObject()); indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape", polygonGeoJson)); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index 5ca4193da46fb..c877cb3be180c 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.geo; +import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.geo.builders.CoordinatesBuilder; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; @@ -57,16 +58,15 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; public class GeoShapeQueryTests extends ESSingleNodeTestCase { public void testNullShape() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -77,12 +77,12 @@ public void testNullShape() throws Exception { } public void testIndexPointsFilterRectangle() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -124,12 +124,12 @@ public void testIndexPointsFilterRectangle() throws Exception { } public void testEdgeCases() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -161,12 +161,12 @@ public void testEdgeCases() throws Exception { } public void testIndexedShapeReference() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", "quadtree") .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); createIndex("shapes"); ensureGreen(); @@ -237,9 +237,9 @@ public void testReusableBuilder() throws IOException { } private void assertUnmodified(ShapeBuilder builder) throws IOException { - String before = jsonBuilder().startObject().field("area", builder).endObject().string(); + String before = Strings.toString(jsonBuilder().startObject().field("area", builder).endObject()); builder.build(); - String after = jsonBuilder().startObject().field("area", builder).endObject().string(); + String after = Strings.toString(jsonBuilder().startObject().field("area", builder).endObject()); assertThat(before, equalTo(after)); } @@ -438,7 +438,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { } public void testPointsOnly() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") 
.startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", randomBoolean() ? "quadtree" : "geohash") @@ -446,7 +446,7 @@ public void testPointsOnly() throws Exception { .field("distance_error_pct", "0.01") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -471,7 +471,7 @@ public void testPointsOnly() throws Exception { } public void testPointsOnlyExplicit() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") .field("tree", randomBoolean() ? "quadtree" : "geohash") @@ -479,7 +479,7 @@ public void testPointsOnlyExplicit() throws Exception { .field("distance_error_pct", "0.01") .field("points_only", true) .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java index 8b1558ba0b319..c62a311488b1f 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java @@ -147,7 +147,7 @@ public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOExc XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); 
builder.close(); - return new CompressedXContent(builder.string()); + return new CompressedXContent(Strings.toString(builder)); } private IndexMetaData remove(IndexMetaData indexMetaData, String alias) { diff --git a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index 49676486588d9..185ec53f3b4e3 100644 --- a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -161,10 +162,10 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { String aliasName = "foo_name"; String typeName = "bar"; - String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate(indexName).addMapping(typeName, mapping, XContentType.JSON).get(); client().admin().indices().prepareAliases().addAlias(indexName, aliasName).get(); @@ -183,10 +184,10 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { public void testMoreLikeThisIssue2197() throws Exception { Client client = client(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") 
.endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).execute().actionGet(); client().prepareIndex("foo", "bar", "1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) @@ -206,10 +207,10 @@ public void testMoreLikeThisIssue2197() throws Exception { // Issue #2489 public void testMoreLikeWithCustomRouting() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() - .endObject().endObject().string(); + .endObject().endObject()); client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).execute().actionGet(); ensureGreen(); @@ -227,10 +228,10 @@ public void testMoreLikeWithCustomRouting() throws Exception { // Issue #3039 public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("bar") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() - .endObject().endObject().string(); + .endObject().endObject()); assertAcked(prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)) .addMapping("bar", mapping, XContentType.JSON)); diff --git a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java index 5174267815b84..3ff7e057da735 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.profile; 
+import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -135,7 +136,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); result.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -168,7 +169,7 @@ public void testToXContent() throws IOException { " }\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 12345678L), Collections.emptyList()); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); @@ -181,7 +182,7 @@ public void testToXContent() throws IOException { " \"breakdown\" : {\n" + " \"key1\" : 12345678\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new ProfileResult("profileName", "some description", Collections.singletonMap("key1", 1234567890L), Collections.emptyList()); @@ -195,6 +196,6 @@ public void testToXContent() throws IOException { " \"breakdown\" : {\n" + " \"key1\" : 1234567890\n" + " }\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java index 10bf8e2a30013..3d88f8696b1cc 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.profile.query; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -107,7 +108,7 @@ public void testToXContent() throws IOException { " \"time_in_nanos\" : 123356\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); result.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -130,7 +131,7 @@ public void testToXContent() throws IOException { " \"time_in_nanos\" : 123356\n" + " }\n" + " ]\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new CollectorResult("collectorName", "some reason", 12345678L, Collections.emptyList()); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); @@ -140,7 +141,7 @@ public void testToXContent() throws IOException { " \"reason\" : \"some reason\",\n" + " \"time\" : \"12.3ms\",\n" + " \"time_in_nanos\" : 12345678\n" + - "}", builder.string()); + "}", Strings.toString(builder)); result = new CollectorResult("collectorName", "some reason", 1234567890L, Collections.emptyList()); builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); @@ -150,6 +151,6 @@ public void testToXContent() throws IOException { " \"reason\" : \"some reason\",\n" + " \"time\" : \"1.2s\",\n" + " \"time_in_nanos\" : 1234567890\n" + - "}", builder.string()); + "}", Strings.toString(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java index f597d42538541..dfc79026f146a 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; @@ -127,7 +128,7 @@ public void testExists() throws Exception { SearchResponse resp = client().prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).execute().actionGet(); assertSearchResponse(resp); try { - assertEquals(String.format(Locale.ROOT, "exists(%s, %d) mapping: %s response: %s", fieldName, count, mapping.string(), resp), count, resp.getHits().getTotalHits()); + assertEquals(String.format(Locale.ROOT, "exists(%s, %d) mapping: %s response: %s", fieldName, count, Strings.toString(mapping), resp), count, resp.getHits().getTotalHits()); } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { final String index = searchHit.getIndex(); diff --git a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 22362f1e514c0..502b10e9a43dd 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -19,11 +19,11 @@ package org.elasticsearch.search.query; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -302,7 +302,7 @@ public void testLenientFlagBeingTooLenient() throws Exception { } public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, InterruptedException, IOException { - String mapping = 
XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -312,7 +312,7 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1") .addMapping("type1", mapping, XContentType.JSON); @@ -352,7 +352,7 @@ public void testSimpleQueryStringOnIndexMetaField() throws Exception { public void testEmptySimpleQueryStringWithAnalysis() throws Exception { // https://github.com/elastic/elasticsearch/issues/18202 - String mapping = XContentFactory.jsonBuilder() + String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -362,7 +362,7 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); CreateIndexRequestBuilder mappingRequest = client().admin().indices() .prepareCreate("test1") diff --git a/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java index a32db632e50af..f0f0123b3ce9c 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -601,7 +602,7 @@ public void testInvalidScrollKeepAlive() throws IOException { private void 
assertToXContentResponse(ClearScrollResponse response, boolean succeed, int numFreed) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); response.toXContent(builder, ToXContent.EMPTY_PARAMS); - Map map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); assertThat(map.get("succeeded"), is(succeed)); assertThat(map.get("num_freed"), equalTo(numFreed)); } diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index edcfdc2155507..53bd9da2ff1de 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; @@ -135,7 +136,7 @@ private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, jsonBuilder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder)); parser.nextToken(); parser.nextToken(); parser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java index a5962dca5951b..b9f73869ba7ab 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ 
b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -50,7 +51,7 @@ public class SearchSliceIT extends ESIntegTestCase { private static final int NUM_DOCS = 1000; private int setupIndex(boolean withDocs) throws IOException, ExecutionException, InterruptedException { - String mapping = XContentFactory.jsonBuilder(). + String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject() .startObject("type") .startObject("properties") @@ -68,7 +69,7 @@ private int setupIndex(boolean withDocs) throws IOException, ExecutionException, .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); int numberOfShards = randomIntBetween(1, 7); assertAcked(client().admin().indices().prepareCreate("test") .setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000)) diff --git a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java index c9b4fd80a2936..aa49bed6975b1 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.xcontent.XContentType; 
@@ -224,7 +225,7 @@ public void testSimpleSorts() throws Exception { } public void testSortMinValueScript() throws IOException { - String mapping = jsonBuilder() + String mapping = Strings.toString(jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -242,7 +243,7 @@ public void testSortMinValueScript() throws IOException { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); ensureGreen(); @@ -343,7 +344,7 @@ public void testDocumentsWithNullValue() throws Exception { // TODO: sort shouldn't fail when sort field is mapped dynamically // We have to specify mapping explicitly because by the time search is performed dynamic mapping might not // be propagated to all nodes yet and sort operation fail when the sort field is not defined - String mapping = jsonBuilder() + String mapping = Strings.toString(jsonBuilder() .startObject() .startObject("type1") .startObject("properties") @@ -355,7 +356,7 @@ public void testDocumentsWithNullValue() throws Exception { .endObject() .endObject() .endObject() - .endObject().string(); + .endObject()); assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java index 06f5ccf696ce4..f267dec2a8623 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -183,7 +184,7 @@ public void testRandomSortBuilders() throws IOException { 
xContentBuilder.endArray(); } xContentBuilder.endObject(); - List> parsedSort = parseSort(xContentBuilder.string()); + List> parsedSort = parseSort(Strings.toString(xContentBuilder)); assertEquals(testBuilders.size(), parsedSort.size()); Iterator> iterator = testBuilders.iterator(); for (SortBuilder parsedBuilder : parsedSort) { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 01b16bb9fb698..deae6bf1a7ef7 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.FieldMemoryStats; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -1113,17 +1114,17 @@ public void testIssue5930() throws IOException { // see issue #6399 public void testIndexingUnrelatedNullValue() throws Exception { - String mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject() - .endObject() - .endObject() - .string(); + String mapping = Strings + .toString(jsonBuilder() + .startObject() + .startObject(TYPE) + .startObject("properties") + .startObject(FIELD) + .field("type", "completion") + .endObject() + .endObject() + .endObject() + .endObject()); assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, mapping, XContentType.JSON).get()); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java 
b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 3b1c88cfc5779..feb15044438ec 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -1079,13 +1080,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE assertSuggestionSize(searchSuggest, 0, 10, "title"); // suggest with collate - String filterString = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("{{field}}", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String filterString = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("{{field}}", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString); filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest); @@ -1098,13 +1099,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE NumShards numShards = getNumShards("test"); // collate suggest with bad query - String incorrectFilterString = XContentFactory.jsonBuilder() - .startObject() - .startObject("test") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String incorrectFilterString = Strings + .toString(XContentFactory.jsonBuilder() 
+ .startObject() + .startObject("test") + .field("title", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString); Map> namedSuggestion = new HashMap<>(); namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest); @@ -1116,13 +1117,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE } // suggest with collation - String filterStringAsFilter = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String filterStringAsFilter = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("title", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", @@ -1130,13 +1131,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE assertSuggestionSize(searchSuggest, 0, 2, "title"); // collate suggest with bad query - String filterStr = XContentFactory.jsonBuilder() - .startObject() - .startObject("pprefix") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); + String filterStr = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("pprefix") + .field("title", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder in = suggest.collateQuery(filterStr); try { @@ -1147,13 +1148,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE } // collate script failure due to no additional params - String collateWithParams = XContentFactory.jsonBuilder() - .startObject() - .startObject("{{query_type}}") - .field("{{query_field}}", "{{suggestion}}") - .endObject() - .endObject() - 
.string(); + String collateWithParams = Strings + .toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("{{query_type}}") + .field("{{query_field}}", "{{suggestion}}") + .endObject() + .endObject()); PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java index d53cbfdab6e80..d54fa0f705f0d 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java @@ -183,7 +183,7 @@ public void testParsingExceptionOnUnknownSuggestion() throws IOException { builder.endArray(); } builder.endObject(); - BytesReference originalBytes = builder.bytes(); + BytesReference originalBytes = BytesReference.bytes(builder); try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); ParsingException ex = expectThrows(ParsingException.class, () -> Suggest.fromXContent(parser)); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 18fb907b4a67d..9c62bb28483c1 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -24,11 +24,12 @@ import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StoredField; -import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.suggest.document.ContextSuggestField; import 
org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -40,11 +41,9 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextBuilder; @@ -61,7 +60,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { public void testIndexingWithNoContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -71,36 +70,36 @@ public void testIndexingWithNoContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = 
defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion1", "suggestion2") - .field("weight", 3) - .endObject() - .startObject() - .array("input", "suggestion3", "suggestion4") - .field("weight", 4) - .endObject() - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion1", "suggestion2") + .field("weight", 3) + .endObject() + .startObject() + .array("input", "suggestion3", "suggestion4") + .field("weight", 4) + .endObject() + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 7); } public void testIndexingWithSimpleContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -110,31 +109,31 @@ public void testIndexingWithSimpleContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument 
parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .field("ctx", "ctx1") - .endObject() - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .field("ctx", "ctx1") + .endObject() + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithSimpleNumberContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -144,31 +143,31 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - 
.field("ctx", 100) - .endObject() - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .field("ctx", 100) + .endObject() + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithSimpleBooleanContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -178,31 +177,31 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .field("ctx", true) - .endObject() - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + 
.startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .field("ctx", true) + .endObject() + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithSimpleNULLContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -212,7 +211,7 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); XContentBuilder builder = jsonBuilder() @@ -229,12 +228,12 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { .endObject(); Exception e = expectThrows(MapperParsingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), XContentType.JSON))); + () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); assertEquals("contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]", e.getCause().getMessage()); } public void testIndexingWithContextList() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -244,29 
+243,29 @@ public void testIndexingWithContextList() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startObject("completion") - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .array("ctx", "ctx1", "ctx2", "ctx3") - .endObject() - .field("weight", 5) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startObject("completion") + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .array("ctx", "ctx1", "ctx2", "ctx3") + .endObject() + .field("weight", 5) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithMixedTypeContextList() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -276,29 +275,29 @@ public void testIndexingWithMixedTypeContextList() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startObject("completion") - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .array("ctx", "ctx1", true, 100) - .endObject() - .field("weight", 5) - .endObject() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startObject("completion") + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .array("ctx", "ctx1", true, 100) + .endObject() + .field("weight", 5) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -308,7 +307,7 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); XContentBuilder builder = jsonBuilder() @@ -323,12 +322,12 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { .endObject(); Exception e = 
expectThrows(MapperParsingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), XContentType.JSON))); + () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]", e.getCause().getMessage()); } public void testIndexingWithMultipleContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -342,7 +341,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -360,7 +359,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); @@ -368,7 +367,7 @@ public void testIndexingWithMultipleContexts() throws Exception { public void testQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value("context1"); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -379,7 +378,7 @@ public void testQueryContextParsingBasic() throws Exception { public void testBooleanQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(true); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -390,7 +389,7 @@ public void testBooleanQueryContextParsingBasic() throws Exception { public void testNumberQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(10); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -401,7 +400,7 @@ public void testNumberQueryContextParsingBasic() throws Exception { public void testNULLQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().nullValue(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -413,7 
+412,7 @@ public void testQueryContextParsingArray() throws Exception { .value("context1") .value("context2") .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(2)); @@ -432,7 +431,7 @@ public void testQueryContextParsingMixedTypeValuesArray() throws Exception { .value(true) .value(10) .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(4)); @@ -458,7 +457,7 @@ public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Excep .value(10) .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -471,7 +470,7 @@ public void testQueryContextParsingObject() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), 
equalTo(1)); @@ -486,7 +485,7 @@ public void testQueryContextParsingObjectHavingBoolean() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -501,7 +500,7 @@ public void testQueryContextParsingObjectHavingNumber() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1)); @@ -516,7 +515,7 @@ public void testQueryContextParsingObjectHavingNULL() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -536,7 +535,7 @@ public void testQueryContextParsingObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = 
mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(2)); @@ -571,7 +570,7 @@ public void testQueryContextParsingMixedTypeObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(4)); @@ -617,7 +616,7 @@ public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Excep .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -640,7 +639,7 @@ public void testQueryContextParsingMixed() throws Exception { .field("prefix", true) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = ContextBuilder.category("cat").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(4)); @@ -674,7 +673,7 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception { .endObject() .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); CategoryContextMapping mapping = 
ContextBuilder.category("cat").build(); Exception e = expectThrows(ElasticsearchParseException.class, () -> mapping.parseQueryContext(parser)); @@ -682,7 +681,7 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception { } public void testUnknownQueryContextParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -696,7 +695,7 @@ public void testUnknownQueryContextParsing() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 9e22ad64d5c1e..2d179f3dbe6c3 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -48,7 +50,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { public void testIndexingWithNoContexts() throws Exception { - String mapping = 
jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -58,36 +60,36 @@ public void testIndexingWithNoContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion1", "suggestion2") - .field("weight", 3) - .endObject() - .startObject() - .array("input", "suggestion3", "suggestion4") - .field("weight", 4) - .endObject() - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion1", "suggestion2") + .field("weight", 3) + .endObject() + .startObject() + .array("input", "suggestion3", "suggestion4") + .field("weight", 4) + .endObject() + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 7); } public void testIndexingWithSimpleContexts() throws Exception { - String mapping = 
jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -98,34 +100,34 @@ public void testIndexingWithSimpleContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startArray("completion") - .startObject() - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .startObject("ctx") - .field("lat", 43.6624803) - .field("lon", -79.3863353) - .endObject() - .endObject() - .field("weight", 5) - .endObject() - .endArray() - .endObject() - .bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startArray("completion") + .startObject() + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .startObject("ctx") + .field("lat", 43.6624803) + .field("lon", -79.3863353) + .endObject() + .endObject() + .field("weight", 5) + .endObject() + .endArray() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithContextList() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") 
.startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -135,38 +137,38 @@ public void testIndexingWithContextList() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", jsonBuilder() - .startObject() - .startObject("completion") - .array("input", "suggestion5", "suggestion6", "suggestion7") - .startObject("contexts") - .startArray("ctx") - .startObject() - .field("lat", 43.6624803) - .field("lon", -79.3863353) - .endObject() - .startObject() - .field("lat", 43.6624718) - .field("lon", -79.3873227) + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + .bytes(jsonBuilder() + .startObject() + .startObject("completion") + .array("input", "suggestion5", "suggestion6", "suggestion7") + .startObject("contexts") + .startArray("ctx") + .startObject() + .field("lat", 43.6624803) + .field("lon", -79.3863353) + .endObject() + .startObject() + .field("lat", 43.6624718) + .field("lon", -79.3873227) + .endObject() + .endArray() .endObject() - .endArray() - .endObject() - .field("weight", 5) - .endObject() - .endObject() - .bytes(), + .field("weight", 5) + .endObject() + .endObject()), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } public void testIndexingWithMultipleContexts() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") + String mapping = 
Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -180,7 +182,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); @@ -198,7 +200,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", builder.bytes(), + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); @@ -206,7 +208,7 @@ public void testIndexingWithMultipleContexts() throws Exception { public void testParsingQueryContextBasic() throws Exception { XContentBuilder builder = jsonBuilder().value("ezs42e44yx96"); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1 + 8)); @@ -225,7 +227,7 @@ public void testParsingQueryContextGeoPoint() throws Exception { .field("lat", 23.654242) .field("lon", 90.047153) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, 
BytesReference.bytes(builder)); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1 + 8)); @@ -248,7 +250,7 @@ public void testParsingQueryContextObject() throws Exception { .field("boost", 10) .array("neighbours", 1, 2, 3) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8)); @@ -286,7 +288,7 @@ public void testParsingQueryContextObjectArray() throws Exception { .array("neighbours", 5) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 1 + 8)); @@ -329,7 +331,7 @@ public void testParsingQueryContextMixed() throws Exception { .field("lon", 92.112583) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); GeoContextMapping mapping = ContextBuilder.geo("geo").build(); List internalQueryContexts = mapping.parseQueryContext(parser); assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8)); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 
922bfd0ef8dad..9cc44e4ae05c1 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -580,7 +580,7 @@ public void testIncludeGlobalState() throws Exception { if(testPipeline) { logger.info("--> creating test pipeline"); - BytesReference pipelineSource = jsonBuilder().startObject() + BytesReference pipelineSource = BytesReference.bytes(jsonBuilder().startObject() .field("description", "my_pipeline") .startArray("processors") .startObject() @@ -588,7 +588,7 @@ public void testIncludeGlobalState() throws Exception { .endObject() .endObject() .endArray() - .endObject().bytes(); + .endObject()); assertAcked(client().admin().cluster().preparePutPipeline("barbaz", pipelineSource, XContentType.JSON).get()); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java index f3d8bba3edb68..44fe0d4dd5c58 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java @@ -81,7 +81,7 @@ public void testRestoreSnapshotRequestParsing() throws IOException { builder.field("ignore_unavailable", indicesOptions.ignoreUnavailable()); } - BytesReference bytes = builder.endObject().bytes(); + BytesReference bytes = BytesReference.bytes(builder.endObject()); request.source(XContentHelper.convertToMap(bytes, true, builder.contentType()).v2()); @@ -146,7 +146,7 @@ public void testCreateSnapshotRequestParsing() throws IOException { builder.field("ignore_unavailable", indicesOptions.ignoreUnavailable()); } - BytesReference bytes = builder.endObject().bytes(); + BytesReference bytes = BytesReference.bytes(builder.endObject()); request.source(XContentHelper.convertToMap(bytes, true, builder.contentType()).v2()); diff --git 
a/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java index d4da4f8f1c5cb..7a481100f1372 100644 --- a/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.tasks; import org.elasticsearch.client.Requests; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -151,7 +152,7 @@ private static RawTaskStatus randomRawTaskStatus() throws IOException { builder.field(randomAlphaOfLength(5), randomAlphaOfLength(5)); } builder.endObject(); - return new RawTaskStatus(builder.bytes()); + return new RawTaskStatus(BytesReference.bytes(builder)); } } diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index 33047b2d12a6a..fcd80b191b842 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.threadpool; import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; @@ -78,7 +79,7 @@ public void testThatToXContentWritesOutUnboundedCorrectly() throws Exception { info.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - Map map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map map = 
XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); assertThat(map, hasKey("foo")); map = (Map) map.get("foo"); assertThat(map, hasKey("queue_size")); @@ -100,7 +101,7 @@ public void testThatToXContentWritesInteger() throws Exception { info.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - Map map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + Map map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); assertThat(map, hasKey("foo")); map = (Map) map.get("foo"); assertThat(map, hasKey("queue_size")); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 57039f8596671..ed17af26b6a49 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -893,7 +894,7 @@ public void testRenderConnectionInfoXContent() throws IOException { builder.endObject(); assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"http_addresses\":[\"0.0.0.0:80\"],\"connected\":true," + "\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," + - "\"skip_unavailable\":true}}", builder.string()); + "\"skip_unavailable\":true}}", Strings.toString(builder)); stats = new RemoteConnectionInfo("some_other_cluster", Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS,1), 
new TransportAddress(TransportAddress.META_ADDRESS,2)), @@ -906,7 +907,7 @@ public void testRenderConnectionInfoXContent() throws IOException { builder.endObject(); assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"],\"http_addresses\":[\"0.0.0.0:80\",\"0.0.0.0:81\"]," + "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," + - "\"skip_unavailable\":false}}", builder.string()); + "\"skip_unavailable\":false}}", Strings.toString(builder)); } private RemoteConnectionInfo getRemoteConnectionInfo(RemoteClusterConnection connection) throws Exception { diff --git a/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java b/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java index ae232c2c6872b..17fb21441e21d 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateNoopIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESIntegTestCase; @@ -240,7 +241,7 @@ private void updateAndCheckSource(long expectedVersion, XContentBuilder xContent private void updateAndCheckSource(long expectedVersion, Boolean detectNoop, XContentBuilder xContentBuilder) { UpdateResponse updateResponse = update(detectNoop, expectedVersion, xContentBuilder); - assertEquals(updateResponse.getGetResult().sourceRef().utf8ToString(), xContentBuilder.bytes().utf8ToString()); + assertEquals(updateResponse.getGetResult().sourceRef().utf8ToString(), BytesReference.bytes(xContentBuilder).utf8ToString()); } private UpdateResponse update(Boolean detectNoop, long expectedVersion, XContentBuilder xContentBuilder) { diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java index d2db8a50b8dc4..b732c6b5b42bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.mapper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -112,9 +113,9 @@ public void testNullValue() throws IOException { public void testEmptyName() throws IOException { // after version 5 for (String type : TYPES) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("").field("type", type).endObject().endObject() - .endObject().endObject().string(); + .endObject().endObject()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping)) diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index 3697b4ee2438d..fa851e9c6d802 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; +import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.DeprecationHandler; @@ -312,7 +313,7 @@ public static SearchSourceBuilder randomSearchSourceBuilder( jsonBuilder.endObject(); XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - jsonBuilder.bytes().streamInput()); + BytesReference.bytes(jsonBuilder).streamInput()); parser.nextToken(); parser.nextToken(); parser.nextToken(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 4887f2716e85f..60fe616fd2315 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -964,9 +964,9 @@ public static void checkGeneratedJson(String expected, QueryBuilder source) thro XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); source.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals( - msg(expected, builder.string()), + msg(expected, Strings.toString(builder)), expected.replaceAll("\\s+", ""), - builder.string().replaceAll("\\s+", "")); + Strings.toString(builder).replaceAll("\\s+", "")); } private static String msg(String left, String right) { @@ -1077,7 +1077,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { }); for (String type : currentTypes) { - mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type, + mapperService.merge(type, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(type, STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME_2, "type=keyword", INT_FIELD_NAME, "type=integer", @@ -1089,7 +1089,7 @@ public void onRemoval(ShardId 
shardId, Accountable accountable) { OBJECT_FIELD_NAME, "type=object", GEO_POINT_FIELD_NAME, "type=geo_point", GEO_SHAPE_FIELD_NAME, "type=geo_shape" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + ))), MapperService.MergeReason.MAPPING_UPDATE, false); // also add mappings for two inner field in the object field mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," + "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" + diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 28f05b6b92604..792d535dc4339 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -61,7 +61,6 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.AdminClient; @@ -1087,7 +1086,7 @@ public void logClusterState() { public void logSegmentsState(String... indices) throws Exception { IndicesSegmentResponse segsRsp = client().admin().indices().prepareSegments(indices).get(); logger.debug("segments {} state: \n{}", indices.length == 0 ? 
"[_all]" : indices, - segsRsp.toXContent(JsonXContent.contentBuilder().prettyPrint(), ToXContent.EMPTY_PARAMS).string()); + Strings.toString(segsRsp.toXContent(JsonXContent.contentBuilder().prettyPrint(), ToXContent.EMPTY_PARAMS))); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 0c64c5605ae71..c47fedd3ab6eb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -943,7 +943,7 @@ protected static BytesReference toShuffledXContent(ToXContent toXContent, XConte BytesReference bytes = XContentHelper.toXContent(toXContent, xContentType, params, humanReadable); try (XContentParser parser = parserFunction.apply(xContentType.xContent(), bytes)) { try (XContentBuilder builder = shuffleXContent(parser, rarely(), exceptFieldNames)) { - return builder.bytes(); + return BytesReference.bytes(builder); } } } @@ -1128,8 +1128,8 @@ public static void assertEqualsWithErrorMessageFromXConte expectedJson.endObject(); NotEqualMessageBuilder message = new NotEqualMessageBuilder(); message.compareMaps( - XContentHelper.convertToMap(actualJson.bytes(), false).v2(), - XContentHelper.convertToMap(expectedJson.bytes(), false).v2()); + XContentHelper.convertToMap(BytesReference.bytes(actualJson), false).v2(), + XContentHelper.convertToMap(BytesReference.bytes(expectedJson), false).v2()); throw new AssertionError("Didn't match expected value:\n" + message); } catch (IOException e) { throw new AssertionError("IOException while building failure message", e); @@ -1141,7 +1141,7 @@ public static void assertEqualsWithErrorMessageFromXConte */ protected final XContentParser createParser(XContentBuilder builder) throws IOException { return builder.generator().contentType().xContent() - .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, builder.bytes().streamInput()); 
+ .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index 1868fc34a991f..6cdd3ac7796dc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo; import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo.Failure; import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.ToXContent; @@ -128,14 +127,14 @@ public static Tuple, List> randomStoredFieldValues(Random r break; case 8: byte[] randomBytes = RandomStrings.randomUnicodeOfLengthBetween(random, 10, 50).getBytes(StandardCharsets.UTF_8); - BytesArray randomBytesArray = new BytesArray(randomBytes); - originalValues.add(randomBytesArray); if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) { //JSON and YAML write the base64 format expectedParsedValues.add(Base64.getEncoder().encodeToString(randomBytes)); + originalValues.add(Base64.getEncoder().encodeToString(randomBytes)); } else { //SMILE and CBOR write the original bytes as they support binary format - expectedParsedValues.add(randomBytesArray); + expectedParsedValues.add(randomBytes); + originalValues.add(randomBytes); } break; default: @@ -176,7 +175,7 @@ public static BytesReference randomSource(Random random, XContentType xContentTy builder.startObject(); addFields(random, builder, minNumFields, 0); builder.endObject(); - return builder.bytes(); + return BytesReference.bytes(builder); } 
catch(IOException e) { throw new RuntimeException(e); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java index cd556a9115ad3..15c650173bf87 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java @@ -55,7 +55,7 @@ public static Map convertToMap(ToXContent part) throws IOExcepti builder.startObject(); part.toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); + return XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); } @@ -209,8 +209,8 @@ public static BytesReference insertRandomFields(XContentType contentType, BytesR } } }; - return XContentTestUtils - .insertIntoXContent(contentType.xContent(), xContent, insertPaths, () -> randomAsciiOfLength(random, 10), value).bytes(); + return BytesReference.bytes(XContentTestUtils + .insertIntoXContent(contentType.xContent(), xContent, insertPaths, () -> randomAsciiOfLength(random, 10), value)); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index ee9b8b3360ada..befc21eb1f697 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -303,7 +303,7 @@ private void wipeClusterSettings() throws IOException { if (mustClear) { adminClient().performRequest("PUT", "/_cluster/settings", emptyMap(), new StringEntity( - clearCommand.string(), ContentType.APPLICATION_JSON)); + Strings.toString(clearCommand), ContentType.APPLICATION_JSON)); } } diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index bea9aab3ff784..ca04c0c53d12a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -25,6 +25,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -147,7 +148,7 @@ private XContentType getContentType(Map headers, XContentType[] private BytesRef bodyAsBytesRef(Map bodyAsMap, XContentType xContentType) throws IOException { Map finalBodyAsMap = stash.replaceStashedValues(bodyAsMap); try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) { - return builder.map(finalBodyAsMap).bytes().toBytesRef(); + return BytesReference.bytes(builder.map(finalBodyAsMap)).toBytesRef(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index 245e7956595c1..3383d3bb21d04 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -22,6 +22,7 @@ import org.apache.http.client.methods.HttpHead; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; 
import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -119,7 +120,7 @@ public String getBodyAsString() { .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, body)) { jsonBuilder.copyCurrentStructure(parser); } - bodyAsString = jsonBuilder.string(); + bodyAsString = Strings.toString(jsonBuilder); } catch (IOException e) { throw new UncheckedIOException("unable to convert response body to a string format", e); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java index 4d03e8e1bb005..62857fee9addb 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/XContentTestUtilsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.test; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -73,7 +74,7 @@ public void testGetInsertPaths() throws IOException { builder.endObject(); try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes(), builder.contentType())) { + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(builder), builder.contentType())) { parser.nextToken(); List insertPaths = XContentTestUtils.getInsertPaths(parser, new Stack<>()); assertEquals(5, insertPaths.size()); @@ -90,16 +91,16 @@ public void testInsertIntoXContent() throws IOException { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); builder.endObject(); - builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""), - () -> "inn.er1", () -> new HashMap<>()); - 
builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), Collections.singletonList(""), - () -> "field1", () -> "value1"); - builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), + Collections.singletonList(""), () -> "inn.er1", () -> new HashMap<>()); + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), + Collections.singletonList(""), () -> "field1", () -> "value1"); + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), Collections.singletonList("inn\\.er1"), () -> "inner2", () -> new HashMap<>()); - builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), builder.bytes(), + builder = XContentTestUtils.insertIntoXContent(XContentType.JSON.xContent(), BytesReference.bytes(builder), Collections.singletonList("inn\\.er1"), () -> "field2", () -> "value2"); try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, builder.bytes(), builder.contentType())) { + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(builder), builder.contentType())) { Map map = parser.map(); assertEquals(2, map.size()); assertEquals("value1", map.get("field1")); @@ -148,7 +149,7 @@ public void testInsertRandomXContent() throws IOException { Map resultMap; try (XContentParser parser = createParser(XContentType.JSON.xContent(), - insertRandomFields(builder.contentType(), builder.bytes(), null, random()))) { + insertRandomFields(builder.contentType(), BytesReference.bytes(builder), null, random()))) { resultMap = parser.map(); } assertEquals(5, resultMap.keySet().size()); @@ -162,7 +163,7 @@ public void testInsertRandomXContent() throws IOException { Predicate pathsToExclude = path -> 
path.endsWith("foo1"); try (XContentParser parser = createParser(XContentType.JSON.xContent(), - insertRandomFields(builder.contentType(), builder.bytes(), pathsToExclude, random()))) { + insertRandomFields(builder.contentType(), BytesReference.bytes(builder), pathsToExclude, random()))) { resultMap = parser.map(); } assertEquals(5, resultMap.keySet().size()); @@ -176,7 +177,7 @@ public void testInsertRandomXContent() throws IOException { pathsToExclude = path -> path.contains("foo1"); try (XContentParser parser = createParser(XContentType.JSON.xContent(), - insertRandomFields(builder.contentType(), builder.bytes(), pathsToExclude, random()))) { + insertRandomFields(builder.contentType(), BytesReference.bytes(builder), pathsToExclude, random()))) { resultMap = parser.map(); } assertEquals(5, resultMap.keySet().size()); diff --git a/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java index b111024c77ad0..705f86fbb0797 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertionsTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.hamcrest; import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -83,11 +84,11 @@ public void testAssertXContentEquivalent() throws IOException { original.endObject(); try (XContentBuilder copy = JsonXContent.contentBuilder(); - XContentParser parser = createParser(original.contentType().xContent(), original.bytes())) { + XContentParser parser = createParser(original.contentType().xContent(), BytesReference.bytes(original))) { parser.nextToken(); 
XContentHelper.copyCurrentStructure(copy.generator(), parser); try (XContentBuilder copyShuffled = shuffleXContent(copy) ) { - assertToXContentEquivalent(original.bytes(), copyShuffled.bytes(), original.contentType()); + assertToXContentEquivalent(BytesReference.bytes(original), BytesReference.bytes(copyShuffled), original.contentType()); } } } @@ -118,7 +119,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { } otherBuilder.endObject(); AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("f2: expected [value2] but not found")); } { @@ -146,7 +148,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { } otherBuilder.endObject(); AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("f2: expected [value2] but was [differentValue2]")); } { @@ -178,7 +181,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { otherBuilder.field("f1", "value"); otherBuilder.endObject(); AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("2: expected [three] but was [four]")); } { @@ -207,7 +211,8 @@ public void testAssertXContentEquivalentErrors() throws IOException { } otherBuilder.endObject(); 
AssertionError error = expectThrows(AssertionError.class, - () -> assertToXContentEquivalent(builder.bytes(), otherBuilder.bytes(), builder.contentType())); + () -> assertToXContentEquivalent(BytesReference.bytes(builder), BytesReference.bytes(otherBuilder), + builder.contentType())); assertThat(error.getMessage(), containsString("expected [1] more entries")); } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java index c377b500ccce3..79d6d42092a85 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ObjectPathTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test.rest.yaml; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -50,7 +51,8 @@ public void testEvaluateObjectPathEscape() throws Exception { xContentBuilder.field("field2.field3", "value2"); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.field2\\.field3"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value2")); @@ -63,7 +65,8 @@ public void testEvaluateObjectPathWithDots() throws Exception { xContentBuilder.field("field2", "value2"); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = 
ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1..field2"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value2")); @@ -82,7 +85,8 @@ public void testEvaluateInteger() throws Exception { xContentBuilder.field("field2", 333); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.field2"); assertThat(object, instanceOf(Integer.class)); assertThat(object, equalTo(333)); @@ -95,7 +99,8 @@ public void testEvaluateDouble() throws Exception { xContentBuilder.field("field2", 3.55); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.field2"); assertThat(object, instanceOf(Double.class)); assertThat(object, equalTo(3.55)); @@ -108,7 +113,8 @@ public void testEvaluateArray() throws Exception { xContentBuilder.array("array1", "value1", "value2"); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.array1"); assertThat(object, instanceOf(List.class)); List list = (List) object; @@ -137,7 +143,8 @@ public void 
testEvaluateArrayElementObject() throws Exception { xContentBuilder.endArray(); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("field1.array1.1.element"); assertThat(object, instanceOf(String.class)); assertThat(object, equalTo("value2")); @@ -164,7 +171,8 @@ public void testEvaluateObjectKeys() throws Exception { xContentBuilder.endObject(); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate("metadata.templates"); assertThat(object, instanceOf(Map.class)); Map map = (Map)object; @@ -182,7 +190,8 @@ public void testEvaluateStashInPropertyName() throws Exception { xContentBuilder.endObject(); xContentBuilder.endObject(); xContentBuilder.endObject(); - ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.bytes()); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), + BytesReference.bytes(xContentBuilder)); try { objectPath.evaluate("field1.$placeholder.element1"); fail("evaluate should have failed due to unresolved placeholder"); @@ -246,7 +255,7 @@ public void testEvaluateArrayAsRoot() throws Exception { xContentBuilder.endObject(); xContentBuilder.endArray(); ObjectPath objectPath = ObjectPath.createFromXContent( - XContentFactory.xContent(xContentBuilder.contentType()), xContentBuilder.bytes()); + XContentFactory.xContent(xContentBuilder.contentType()), 
BytesReference.bytes(xContentBuilder)); Object object = objectPath.evaluate(""); assertThat(object, notNullValue()); assertThat(object, instanceOf(List.class)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java index 65f52352289f0..0705eb32fc294 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java @@ -119,7 +119,7 @@ public void testShuffleXContentExcludeFields() throws IOException { } } builder.endObject(); - BytesReference bytes = builder.bytes(); + BytesReference bytes = BytesReference.bytes(builder); final LinkedHashMap initialMap; try (XContentParser parser = createParser(xContentType.xContent(), bytes)) { initialMap = (LinkedHashMap)parser.mapOrdered(); From 6284b7e720a0b8240d98baa0daa9833bc12e0521 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 14 Mar 2018 16:19:04 -0700 Subject: [PATCH 71/89] Improve error message for installing plugin (#28298) Provide more actionable error message when installing an offline plugin in the plugins directory, and the `plugins` directory for the node contains plugin distribution. 
Closes #27401 --- .../plugins/InstallPluginCommand.java | 26 +++++++++++++------ .../plugins/InstallPluginCommandTests.java | 2 +- .../elasticsearch/plugins/PluginsService.java | 2 +- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index b7f201b70aa46..44043f1c68545 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -53,6 +53,7 @@ import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; @@ -218,17 +219,17 @@ void execute(Terminal terminal, String pluginId, boolean isBatch, Environment en throw new UserException(ExitCodes.USAGE, "plugin id is required"); } - Path pluginZip = download(terminal, pluginId, env.tmpFile()); + Path pluginZip = download(terminal, pluginId, env.tmpFile(), env.pluginsFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); install(terminal, isBatch, extractedZip, env); } /** Downloads the plugin and returns the file it was downloaded to. 
*/ - private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Exception { + private Path download(Terminal terminal, String pluginId, Path tmpDir, Path pluginsDir) throws Exception { if (OFFICIAL_PLUGINS.contains(pluginId)) { final String url = getElasticUrl(terminal, getStagingHash(), Version.CURRENT, pluginId, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from elastic"); - return downloadZipAndChecksum(terminal, url, tmpDir, false); + return downloadZipAndChecksum(terminal, url, tmpDir, pluginsDir, false); } // now try as maven coordinates, a valid URL would only have a colon and slash @@ -236,7 +237,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Ex if (coordinates.length == 3 && pluginId.contains("/") == false && pluginId.startsWith("file:") == false) { String mavenUrl = getMavenUrl(terminal, coordinates, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from maven central"); - return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, true); + return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, pluginsDir, true); } // fall back to plain old URL @@ -250,7 +251,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Ex throw new UserException(ExitCodes.USAGE, msg); } terminal.println("-> Downloading " + URLDecoder.decode(pluginId, "UTF-8")); - return downloadZip(terminal, pluginId, tmpDir); + return downloadZip(terminal, pluginId, tmpDir, pluginsDir); } // pkg private so tests can override @@ -324,9 +325,17 @@ private List checkMisspelledPlugin(String pluginId) { /** Downloads a zip from the url, into a temp file under the given temp dir. 
*/ // pkg private for tests @SuppressForbidden(reason = "We use getInputStream to download plugins") - Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { terminal.println(VERBOSE, "Retrieving zip from " + urlString); URL url = new URL(urlString); + if (url.getProtocol().equals("file")) { + Path pluginsFile = Paths.get(url.getFile()); + if (pluginsFile.startsWith(pluginsDir)) { + throw new IllegalStateException("Installation failed! " + + "Make sure the plugins directory [" + pluginsDir + "] can not contain the plugin distribution [" + + pluginsFile + "]; move the distribution to an alternate location!"); + } + } Path zip = Files.createTempFile(tmpDir, null, ".zip"); URLConnection urlConnection = url.openConnection(); urlConnection.addRequestProperty("User-Agent", "elasticsearch-plugin-installer"); @@ -375,8 +384,9 @@ public void onProgress(int percent) { /** Downloads a zip from the url, as well as a SHA512 (or SHA1) checksum, and checks the checksum. 
*/ // pkg private for tests @SuppressForbidden(reason = "We use openStream to download plugins") - private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, boolean allowSha1) throws Exception { - Path zip = downloadZip(terminal, urlString, tmpDir); + private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir, boolean allowSha1) + throws Exception { + Path zip = downloadZip(terminal, urlString, tmpDir, pluginsDir); pathsToDeleteOnShutdown.add(zip); String checksumUrlString = urlString + ".sha512"; URL checksumUrl = openUrl(checksumUrlString); diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index d799cb0407f58..0735c579a255f 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -981,7 +981,7 @@ void assertInstallPluginFromUrl(String pluginId, String name, String url, String Path pluginZip = createPlugin(name, pluginDir); InstallPluginCommand command = new InstallPluginCommand() { @Override - Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { assertEquals(url, urlString); Path downloadedPath = tmpDir.resolve("downloaded.zip"); Files.copy(pluginZip, downloadedPath); diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 4514691e4bec4..cca85d28aa137 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -328,7 +328,7 @@ public 
String name() { public Collection bundles() { return bundles; } - + } /** From 48325ecb5f55593330379674135cf6822bdb5c28 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 14 Mar 2018 22:02:06 +0100 Subject: [PATCH 72/89] [Docs] Fix Java Api index administration usage (#28260) The Java API documentation for index administration currenty is wrong because the PutMappingRequestBuilder#setSource(Object... source) an CreateIndexRequestBuilder#addMapping(String type, Object... source) methods delegate to methods that check that the input arguments are valid key/value pairs. This changes the docs so the java api code examples are included from documentation integration tests so we detect compile and runtime issues earlier. Closes #28131 --- .../admin/indices/put-mapping.asciidoc | 41 ++---------- .../documentation/IndicesDocumentationIT.java | 67 ++++++++++++++++++- 2 files changed, 69 insertions(+), 39 deletions(-) diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc index fa3e72582eec5..a70e055f3a0ce 100644 --- a/docs/java-api/admin/indices/put-mapping.asciidoc +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -13,32 +13,9 @@ include-tagged::{client-tests}/IndicesDocumentationIT.java[index-with-mapping] The PUT mapping API also allows to add a new type to an existing index: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -client.admin().indices().preparePutMapping("twitter") <1> - .setType("user") <2> - .setSource("{\n" + <3> - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}") - .get(); - -// You can also provide the type in the source document -client.admin().indices().preparePutMapping("twitter") - .setType("user") - .setSource("{\n" + - " \"user\":{\n" + <4> - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" 
+ - " }\n" + - "}") - .get(); +include-tagged::{client-tests}/IndicesDocumentationIT.java[putMapping-request-source] -------------------------------------------------- <1> Puts a mapping on existing index called `twitter` <2> Adds a `user` mapping type. @@ -47,20 +24,10 @@ client.admin().indices().preparePutMapping("twitter") You can use the same API to update an existing mapping: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -client.admin().indices().preparePutMapping("twitter") <1> - .setType("user") <2> - .setSource("{\n" + <3> - " \"properties\": {\n" + - " \"user_name\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}") - .get(); +include-tagged::{client-tests}/IndicesDocumentationIT.java[putMapping-request-source-append] -------------------------------------------------- <1> Puts a mapping on existing index called `twitter` <2> Updates the `user` mapping type. <3> This `user` has now a new field `user_name` - diff --git a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java index e52b03082254f..064702170d5bb 100644 --- a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java +++ b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java @@ -19,13 +19,32 @@ package org.elasticsearch.client.documentation; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESIntegTestCase; +/** + * This class is used to generate the Java indices administration documentation. + * You need to wrap your code between two tags like: + * // tag::example[] + * // end::example[] + * + * Where example is your tag name. 
+ * + * Then in the documentation, you can extract what is between tag and end tags + * with ["source","java",subs="attributes,callouts,macros"] + * -------------------------------------------------- + * include-tagged::{client-tests}/IndicesDocumentationIT.java[your-example-tag-here] + * -------------------------------------------------- + */ public class IndicesDocumentationIT extends ESIntegTestCase { - public void createMappings() { + /** + * This test method is used to generate the Put Mapping Java Indices API documentation + * at "docs/java-api/admin/indices/put-mapping.asciidoc" so the documentation gets tested + * so that it compiles and runs without throwing errors at runtime. + */ + public void testPutMappingDocumentation() throws Exception { Client client = client(); // tag::index-with-mapping @@ -39,6 +58,50 @@ public void createMappings() { "}") .get(); // end::index-with-mapping + + // we need to delete in order to create a fresh new index with another type + client.admin().indices().prepareDelete("twitter").get(); + client.admin().indices().prepareCreate("twitter").get(); + + // tag::putMapping-request-source + client.admin().indices().preparePutMapping("twitter") // <1> + .setType("user") // <2> + .setSource("{\n" + // <3> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + "}", XContentType.JSON) + .get(); + + // You can also provide the type in the source document + client.admin().indices().preparePutMapping("twitter") + .setType("user") + .setSource("{\n" + + " \"user\":{\n" + // <4> + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}", XContentType.JSON) + .get(); + // end::putMapping-request-source + + // tag::putMapping-request-source-append + client.admin().indices().preparePutMapping("twitter") // <1> + .setType("user") // <2> + .setSource("{\n" + // <3> + " \"properties\": {\n" + + " \"user_name\": {\n" + + " \"type\": \"text\"\n" + 
+ " }\n" + + " }\n" + + "}", XContentType.JSON) + .get(); + // end::putMapping-request-source-append } } From a086e6500b323210e8387ff00e9277885f89187a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 15 Mar 2018 11:49:45 +0100 Subject: [PATCH 73/89] Mute failing GetResultTests and DocumentFieldTests --- .../java/org/elasticsearch/index/get/DocumentFieldTests.java | 1 + .../test/java/org/elasticsearch/index/get/GetResultTests.java | 2 ++ 2 files changed, 3 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java index 9d581054f46b8..d3c8af0d0f70e 100644 --- a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java @@ -55,6 +55,7 @@ public void testEqualsAndHashcode() { DocumentFieldTests::mutateDocumentField); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomDocumentField(xContentType); diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index a38d183299cdd..18b14ac4b0506 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -49,6 +49,7 @@ public class GetResultTests extends ESTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); @@ -86,6 +87,7 @@ public void testToXContent() throws IOException { } } + @AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContentEmbedded() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); From ead742e2fae2679cd735b2273bb0ba7bc02e0e8e Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 15 Mar 2018 16:39:02 +0100 Subject: [PATCH 74/89] Reenable LiveVersionMapTests.testRamBytesUsed on Java 9. (#29063) I also had to make the test more lenient. This is due to the fact that Lucene's RamUsageTester was changed in order not to reflect `java.*` classes and the way that it estimates ram usage of maps is by assuming it has similar memory usage to an `Object[]` array that stores all keys and values. The implementation in `LiveVersionMap` tries to be slightly more realistic by taking the load factor and linked lists into account, so it usually gives a higher estimate which happens to be closer to reality. Closes #22548 --- .../index/engine/LiveVersionMapTests.java | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index 8bfe256fe0b8a..8c5973e8750fd 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -21,10 +21,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.Constants; import org.apache.lucene.util.RamUsageTester; import org.apache.lucene.util.TestUtil; -import org.elasticsearch.Assertions; -import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.test.ESTestCase; @@ -43,7 +42,6 @@ public class LiveVersionMapTests extends ESTestCase { public void testRamBytesUsed() throws Exception { - 
assumeTrue("Test disabled for JDK 9", JavaVersion.current().compareTo(JavaVersion.parse("9")) < 0); LiveVersionMap map = new LiveVersionMap(); for (int i = 0; i < 100000; ++i) { BytesRefBuilder uid = new BytesRefBuilder(); @@ -72,8 +70,23 @@ public void testRamBytesUsed() throws Exception { } actualRamBytesUsed = RamUsageTester.sizeOf(map); estimatedRamBytesUsed = map.ramBytesUsed(); - // less than 25% off - assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, actualRamBytesUsed / 4); + long tolerance; + if (Constants.JRE_IS_MINIMUM_JAVA9) { + // With Java 9, RamUsageTester computes the memory usage of maps as + // the memory usage of an array that would contain exactly all keys + // and values. This is an under-estimation of the actual memory + // usage since it ignores the impact of the load factor and of the + // linked list/tree that is used to resolve collisions. So we use a + // bigger tolerance. + // less than 50% off + tolerance = actualRamBytesUsed / 2; + } else { + // Java 8 is more accurate by doing reflection into the actual JDK classes + // so we give it a lower error bound. + // less than 25% off + tolerance = actualRamBytesUsed / 4; + } + assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } private BytesRef uid(String string) { From 0730ee96c777d606b3b625c1afd8654afcd8e2a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 15 Mar 2018 16:42:26 +0100 Subject: [PATCH 75/89] [Tests] Fix GetResultTests and DocumentFieldTests failures (#29083) Changes made in #28972 seems to have changed some assumptions about how SMILE and CBOR write byte[] values and how this is tested. This changes the generation of the randomized DocumentField values back to BytesArray while expecting the JSON and YAML deserialisation to produce Base64 encoded strings and SMILE and CBOR to parse back BytesArray instances. 
Closes #29080 --- .../org/elasticsearch/index/get/DocumentFieldTests.java | 1 - .../java/org/elasticsearch/index/get/GetResultTests.java | 2 -- .../main/java/org/elasticsearch/test/RandomObjects.java | 8 +++++--- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java index d3c8af0d0f70e..9d581054f46b8 100644 --- a/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/DocumentFieldTests.java @@ -55,7 +55,6 @@ public void testEqualsAndHashcode() { DocumentFieldTests::mutateDocumentField); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomDocumentField(xContentType); diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index 18b14ac4b0506..a38d183299cdd 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -49,7 +49,6 @@ public class GetResultTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContent() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); @@ -87,7 +86,6 @@ public void testToXContent() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29080") public void testToAndFromXContentEmbedded() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); Tuple tuple = randomGetResult(xContentType); diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index 6cdd3ac7796dc..a509645495858 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -21,10 +21,12 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo; import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo.Failure; import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.ToXContent; @@ -127,14 +129,14 @@ public static Tuple, List> randomStoredFieldValues(Random r break; case 8: byte[] randomBytes = RandomStrings.randomUnicodeOfLengthBetween(random, 10, 50).getBytes(StandardCharsets.UTF_8); + BytesArray randomBytesArray = new BytesArray(randomBytes); + originalValues.add(randomBytesArray); if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) { //JSON and YAML write the base64 format expectedParsedValues.add(Base64.getEncoder().encodeToString(randomBytes)); - originalValues.add(Base64.getEncoder().encodeToString(randomBytes)); } else { //SMILE and CBOR write the original bytes as they support binary format - expectedParsedValues.add(randomBytes); - originalValues.add(randomBytes); + expectedParsedValues.add(randomBytesArray); } break; default: From 72317fbe1d8363462e4d4dbe7d2dc9e35fee8f7b Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 15 Mar 2018 16:43:56 +0100 Subject: [PATCH 76/89] Validate regular expressions in dynamic templates. 
(#29013) Today you would only get these errors at index time. Relates #24749 --- .../index/mapper/DynamicTemplate.java | 19 ++++++++++++++++++- .../index/mapper/DynamicTemplateTests.java | 13 +++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java index 8f3634e0132af..ca323b4ac5285 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java @@ -216,7 +216,24 @@ public static DynamicTemplate parse(String name, Map conf, } } } - return new DynamicTemplate(name, pathMatch, pathUnmatch, match, unmatch, xcontentFieldType, MatchType.fromString(matchPattern), mapping); + + final MatchType matchType = MatchType.fromString(matchPattern); + + if (indexVersionCreated.onOrAfter(Version.V_6_3_0)) { + // Validate that the pattern + for (String regex : new String[] { pathMatch, match, pathUnmatch, unmatch }) { + if (regex == null) { + continue; + } + try { + matchType.matches(regex, ""); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Pattern [" + regex + "] of type [" + matchType + "] is invalid. 
Cannot create dynamic template [" + name + "].", e); + } + } + } + + return new DynamicTemplate(name, pathMatch, pathUnmatch, match, unmatch, xcontentFieldType, matchType, mapping); } private final String name; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index 562d54a92babd..f48603d30515f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -62,6 +62,19 @@ public void testParseUnknownMatchType() { e.getMessage()); } + public void testParseInvalidRegex() { + for (String param : new String[] { "path_match", "match", "path_unmatch", "unmatch" }) { + Map templateDef = new HashMap<>(); + templateDef.put("match", "foo"); + templateDef.put(param, "*a"); + templateDef.put("match_pattern", "regex"); + templateDef.put("mapping", Collections.singletonMap("store", true)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> DynamicTemplate.parse("my_template", templateDef, Version.V_6_3_0)); + assertEquals("Pattern [*a] of type [regex] is invalid. Cannot create dynamic template [my_template].", e.getMessage()); + } + } + public void testMatchAllTemplate() { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "*"); From 1305da71d6cb1eff049c9b406ec8f7341fadb8f4 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 15 Mar 2018 12:10:30 -0400 Subject: [PATCH 77/89] Docs: HighLevelRestClient#exists (#29073) Docs: HighLevelRestClient#exists Add documentation for `HighLevelRestClient#exists`. 
Relates to #28389 --- .../documentation/CRUDDocumentationIT.java | 43 +++++++++++++ .../high-level/document/exists.asciidoc | 60 +++++++++++++++++++ .../high-level/document/get.asciidoc | 1 + .../high-level/supported-apis.asciidoc | 1 + 4 files changed, 105 insertions(+) create mode 100644 docs/java-rest/high-level/document/exists.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 95e5364756424..a12bd48f22242 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -932,6 +932,49 @@ public void onFailure(Exception e) { } } + public void testExists() throws Exception { + RestHighLevelClient client = highLevelClient(); + // tag::exists-request + GetRequest getRequest = new GetRequest( + "posts", // <1> + "doc", // <2> + "1"); // <3> + getRequest.fetchSourceContext(new FetchSourceContext(false)); // <4> + getRequest.storedFields("_none_"); // <5> + // end::exists-request + { + // tag::exists-execute + boolean exists = client.exists(getRequest); + // end::exists-execute + assertFalse(exists); + } + { + // tag::exists-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(Boolean exists) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::exists-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::exists-execute-async + client.existsAsync(getRequest, listener); // <1> + // end::exists-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void 
testBulkProcessor() throws InterruptedException { RestHighLevelClient client = highLevelClient(); { diff --git a/docs/java-rest/high-level/document/exists.asciidoc b/docs/java-rest/high-level/document/exists.asciidoc new file mode 100644 index 0000000000000..d14c9fdd66a05 --- /dev/null +++ b/docs/java-rest/high-level/document/exists.asciidoc @@ -0,0 +1,60 @@ +[[java-rest-high-document-exists]] +=== Exists API + +The exists API returns `true` if a document exists, and `false` otherwise. + +[[java-rest-high-document-exists-request]] +==== Exists Request + +It uses `GetRequest` just like the <>. +All of its <> +are supported. Since `exists()` only returns `true` or `false`, we recommend +turning off fetching `_source` and any stored fields so the request is +slightly lighter: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-request] +-------------------------------------------------- +<1> Index +<2> Type +<3> Document id +<4> Disable fetching `_source`. +<5> Disable fetching stored fields. + +[[java-rest-high-document-exists-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute] +-------------------------------------------------- + +[[java-rest-high-document-exists-async]] +==== Asynchronous Execution + +The asynchronous execution of exists request requires both the `GetRequest` +instance and an `ActionListener` instance to be passed to the asynchronous +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-async] +-------------------------------------------------- +<1> The `GetRequest` to execute and the `ActionListener` to use when +the execution completes. 
+ +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `GetResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/CRUDDocumentationIT.java[exists-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of failure. The raised exception is provided as an argument. diff --git a/docs/java-rest/high-level/document/get.asciidoc b/docs/java-rest/high-level/document/get.asciidoc index 07a0b7c1a6721..9d04e138eea1e 100644 --- a/docs/java-rest/high-level/document/get.asciidoc +++ b/docs/java-rest/high-level/document/get.asciidoc @@ -14,6 +14,7 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request] <2> Type <3> Document id +[[java-rest-high-document-get-request-optional-arguments]] ==== Optional arguments The following arguments can optionally be provided: diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index fa2f57069ba93..79f17db577421 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -17,6 +17,7 @@ Multi-document APIs:: include::document/index.asciidoc[] include::document/get.asciidoc[] +include::document/exists.asciidoc[] include::document/delete.asciidoc[] include::document/update.asciidoc[] include::document/bulk.asciidoc[] From 0dcd0f795392a84d51350aa63ff56a939252747a Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 15 Mar 2018 10:51:46 -0700 Subject: [PATCH 78/89] Revert "Improve error message for installing plugin (#28298)" This reverts commit 
6284b7e720a0b8240d98baa0daa9833bc12e0521. The reason is that Windows test are failing, because of the incorrect path for the plugin --- .../plugins/InstallPluginCommand.java | 26 ++++++------------- .../plugins/InstallPluginCommandTests.java | 2 +- .../elasticsearch/plugins/PluginsService.java | 2 +- 3 files changed, 10 insertions(+), 20 deletions(-) diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 44043f1c68545..b7f201b70aa46 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -53,7 +53,6 @@ import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; @@ -219,17 +218,17 @@ void execute(Terminal terminal, String pluginId, boolean isBatch, Environment en throw new UserException(ExitCodes.USAGE, "plugin id is required"); } - Path pluginZip = download(terminal, pluginId, env.tmpFile(), env.pluginsFile()); + Path pluginZip = download(terminal, pluginId, env.tmpFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); install(terminal, isBatch, extractedZip, env); } /** Downloads the plugin and returns the file it was downloaded to. 
*/ - private Path download(Terminal terminal, String pluginId, Path tmpDir, Path pluginsDir) throws Exception { + private Path download(Terminal terminal, String pluginId, Path tmpDir) throws Exception { if (OFFICIAL_PLUGINS.contains(pluginId)) { final String url = getElasticUrl(terminal, getStagingHash(), Version.CURRENT, pluginId, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from elastic"); - return downloadZipAndChecksum(terminal, url, tmpDir, pluginsDir, false); + return downloadZipAndChecksum(terminal, url, tmpDir, false); } // now try as maven coordinates, a valid URL would only have a colon and slash @@ -237,7 +236,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir, Path plug if (coordinates.length == 3 && pluginId.contains("/") == false && pluginId.startsWith("file:") == false) { String mavenUrl = getMavenUrl(terminal, coordinates, Platforms.PLATFORM_NAME); terminal.println("-> Downloading " + pluginId + " from maven central"); - return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, pluginsDir, true); + return downloadZipAndChecksum(terminal, mavenUrl, tmpDir, true); } // fall back to plain old URL @@ -251,7 +250,7 @@ private Path download(Terminal terminal, String pluginId, Path tmpDir, Path plug throw new UserException(ExitCodes.USAGE, msg); } terminal.println("-> Downloading " + URLDecoder.decode(pluginId, "UTF-8")); - return downloadZip(terminal, pluginId, tmpDir, pluginsDir); + return downloadZip(terminal, pluginId, tmpDir); } // pkg private so tests can override @@ -325,17 +324,9 @@ private List checkMisspelledPlugin(String pluginId) { /** Downloads a zip from the url, into a temp file under the given temp dir. 
*/ // pkg private for tests @SuppressForbidden(reason = "We use getInputStream to download plugins") - Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { terminal.println(VERBOSE, "Retrieving zip from " + urlString); URL url = new URL(urlString); - if (url.getProtocol().equals("file")) { - Path pluginsFile = Paths.get(url.getFile()); - if (pluginsFile.startsWith(pluginsDir)) { - throw new IllegalStateException("Installation failed! " + - "Make sure the plugins directory [" + pluginsDir + "] can not contain the plugin distribution [" + - pluginsFile + "]; move the distribution to an alternate location!"); - } - } Path zip = Files.createTempFile(tmpDir, null, ".zip"); URLConnection urlConnection = url.openConnection(); urlConnection.addRequestProperty("User-Agent", "elasticsearch-plugin-installer"); @@ -384,9 +375,8 @@ public void onProgress(int percent) { /** Downloads a zip from the url, as well as a SHA512 (or SHA1) checksum, and checks the checksum. 
*/ // pkg private for tests @SuppressForbidden(reason = "We use openStream to download plugins") - private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir, boolean allowSha1) - throws Exception { - Path zip = downloadZip(terminal, urlString, tmpDir, pluginsDir); + private Path downloadZipAndChecksum(Terminal terminal, String urlString, Path tmpDir, boolean allowSha1) throws Exception { + Path zip = downloadZip(terminal, urlString, tmpDir); pathsToDeleteOnShutdown.add(zip); String checksumUrlString = urlString + ".sha512"; URL checksumUrl = openUrl(checksumUrlString); diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 0735c579a255f..d799cb0407f58 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -981,7 +981,7 @@ void assertInstallPluginFromUrl(String pluginId, String name, String url, String Path pluginZip = createPlugin(name, pluginDir); InstallPluginCommand command = new InstallPluginCommand() { @Override - Path downloadZip(Terminal terminal, String urlString, Path tmpDir, Path pluginsDir) throws IOException { + Path downloadZip(Terminal terminal, String urlString, Path tmpDir) throws IOException { assertEquals(url, urlString); Path downloadedPath = tmpDir.resolve("downloaded.zip"); Files.copy(pluginZip, downloadedPath); diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index cca85d28aa137..4514691e4bec4 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -328,7 +328,7 @@ public 
String name() { public Collection bundles() { return bundles; } - + } /** From 1b8ecf85768d43c4389afc72e4c7c56eb08ce4cd Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 15 Mar 2018 11:40:20 -0700 Subject: [PATCH 79/89] [DOCS] Add X-Pack upgrade details (#29038) --- docs/reference/upgrade/cluster_restart.asciidoc | 5 +++++ docs/reference/upgrade/rolling_upgrade.asciidoc | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/docs/reference/upgrade/cluster_restart.asciidoc b/docs/reference/upgrade/cluster_restart.asciidoc index a8f2f51dad2aa..bdd8a8207ff83 100644 --- a/docs/reference/upgrade/cluster_restart.asciidoc +++ b/docs/reference/upgrade/cluster_restart.asciidoc @@ -26,6 +26,9 @@ recovery. include::synced-flush.asciidoc[] -- +. *Stop any machine learning jobs that are running.* See +{xpack-ref}/stopping-ml.html[Stopping Machine Learning]. + . *Shutdown all nodes.* + -- @@ -124,3 +127,5 @@ GET _cat/recovery -------------------------------------------------- // CONSOLE -- + +. *Restart machine learning jobs.* diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc index 2b46b65f2617f..5af521303175c 100644 --- a/docs/reference/upgrade/rolling_upgrade.asciidoc +++ b/docs/reference/upgrade/rolling_upgrade.asciidoc @@ -32,6 +32,9 @@ include::synced-flush.asciidoc[] -- +. *Stop any machine learning jobs that are running.* See +{xpack-ref}/stopping-ml.html[Stopping Machine Learning]. + . [[upgrade-node]] *Shut down a single node*. + -- @@ -147,6 +150,8 @@ for each node that needs to be updated. -- +. *Restart machine learning jobs.* + [IMPORTANT] ==================================================== From 8bf18567710317b8c3ce931364748631a3aa82d6 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 15 Mar 2018 14:42:15 -0400 Subject: [PATCH 80/89] TEST: write ops should execute under shard permit (#28966) Currently ESIndexLevelReplicationTestCase executes write operations without acquiring index shard permit. 
This may prevent the primary term on replica from being updated or cause a race between resync and indexing on primary. This commit ensures that write operations are always executed under shard permit like the production code. --- .../ESIndexLevelReplicationTestCase.java | 29 ++++++++++++++----- .../IndexLevelReplicationTests.java | 4 +-- .../RecoveryDuringReplicationTests.java | 6 ++-- 3 files changed, 26 insertions(+), 13 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index f74ffdc4b4dc4..ad046dddc0c27 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -456,6 +456,10 @@ public void onFailure(Exception e) { } } + IndexShard getPrimaryShard() { + return replicationGroup.primary; + } + protected abstract PrimaryResult performOnPrimary(IndexShard primary, Request request) throws Exception; protected abstract void performOnReplica(ReplicaRequest request, IndexShard replica) throws Exception; @@ -592,7 +596,7 @@ protected PrimaryResult performOnPrimary(IndexShard primary, BulkShardRequest re @Override protected void performOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { - executeShardBulkOnReplica(replica, request); + executeShardBulkOnReplica(request, replica, getPrimaryShard().getPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint()); } } @@ -602,15 +606,24 @@ private TransportWriteAction.WritePrimaryResult result = - TransportShardBulkAction.performOnPrimary(request, primary, null, - System::currentTimeMillis, new TransportShardBulkActionTests.NoopMappingUpdatePerformer()); + final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); + primary.acquirePrimaryOperationPermit(permitAcquiredFuture, 
ThreadPool.Names.SAME, request); + final TransportWriteAction.WritePrimaryResult result; + try (Releasable ignored = permitAcquiredFuture.actionGet()) { + result = TransportShardBulkAction.performOnPrimary(request, primary, null, System::currentTimeMillis, + new TransportShardBulkActionTests.NoopMappingUpdatePerformer()); + } TransportWriteActionTestHelper.performPostWriteActions(primary, request, result.location, logger); return result; } - private void executeShardBulkOnReplica(IndexShard replica, BulkShardRequest request) throws Exception { - final Translog.Location location = TransportShardBulkAction.performOnReplica(request, replica); + private void executeShardBulkOnReplica(BulkShardRequest request, IndexShard replica, long operationPrimaryTerm, long globalCheckpointOnPrimary) throws Exception { + final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); + replica.acquireReplicaOperationPermit(operationPrimaryTerm, globalCheckpointOnPrimary, permitAcquiredFuture, ThreadPool.Names.SAME, request); + final Translog.Location location; + try (Releasable ignored = permitAcquiredFuture.actionGet()) { + location = TransportShardBulkAction.performOnReplica(request, replica); + } TransportWriteActionTestHelper.performPostWriteActions(replica, request, location, logger); } @@ -630,8 +643,8 @@ BulkShardRequest indexOnPrimary(IndexRequest request, IndexShard primary) throws /** * indexes the given requests on the supplied replica shard */ - void indexOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { - executeShardBulkOnReplica(replica, request); + void indexOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica) throws Exception { + executeShardBulkOnReplica(request, replica, group.primary.getPrimaryTerm(), group.primary.getGlobalCheckpoint()); } class GlobalCheckpointSync extends ReplicationAction< diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java 
b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 8c15a2a84ddb8..86436d8d88ac9 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -209,7 +209,7 @@ public void testConflictingOpsOnReplica() throws Exception { logger.info("--> isolated replica " + replica1.routingEntry()); BulkShardRequest replicationRequest = indexOnPrimary(indexRequest, shards.getPrimary()); for (int i = 1; i < replicas.size(); i++) { - indexOnReplica(replicationRequest, replicas.get(i)); + indexOnReplica(replicationRequest, shards, replicas.get(i)); } logger.info("--> promoting replica to primary " + replica1.routingEntry()); @@ -318,7 +318,7 @@ public void testSeqNoCollision() throws Exception { logger.info("--> Isolate replica1"); IndexRequest indexDoc1 = new IndexRequest(index.getName(), "type", "d1").source("{}", XContentType.JSON); BulkShardRequest replicationRequest = indexOnPrimary(indexDoc1, shards.getPrimary()); - indexOnReplica(replicationRequest, replica2); + indexOnReplica(replicationRequest, shards, replica2); final Translog.Operation op1; final List initOperations = new ArrayList<>(initDocs); diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index dcfa2cb34a2db..66e2a09750a2d 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -236,7 +236,7 @@ public void testRecoveryAfterPrimaryPromotion() throws Exception { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "rollback_" + i) .source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = 
indexOnPrimary(indexRequest, oldPrimary); - indexOnReplica(bulkShardRequest, replica); + indexOnReplica(bulkShardRequest, shards, replica); } if (randomBoolean()) { oldPrimary.flush(new FlushRequest(index.getName())); @@ -326,7 +326,7 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "stale_" + i) .source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); - indexOnReplica(bulkShardRequest, replica); + indexOnReplica(bulkShardRequest, shards, replica); } shards.flush(); shards.promoteReplicaToPrimary(newPrimary).get(); @@ -374,7 +374,7 @@ public void testResyncAfterPrimaryPromotion() throws Exception { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_" + i) .source("{}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); - indexOnReplica(bulkShardRequest, newPrimary); + indexOnReplica(bulkShardRequest, shards, newPrimary); } logger.info("--> resyncing replicas"); PrimaryReplicaSyncer.ResyncTask task = shards.promoteReplicaToPrimary(newPrimary).get(); From 072b3bf81bd0c5d9eb1d3a0f68fe8a98f48bd97e Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 14 Mar 2018 07:12:15 -0700 Subject: [PATCH 81/89] Fix Parsing Bug with Update By Query for Stored Scripts (#29039) This changes the parsing logic for stored scripts in update by query to match the parsing logic for scripts in general Elasticsearch. 
Closes #28002 --- .../reindex/RestUpdateByQueryAction.java | 12 ++- .../test/update_by_query/10_script.yml | 81 +++++++++++++++++++ 2 files changed, 91 insertions(+), 2 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 8b898244c0750..8f09afbb17c6c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -86,7 +86,7 @@ private static Script parseScript(Object config) { Map configMap = (Map) config; String script = null; ScriptType type = null; - String lang = DEFAULT_SCRIPT_LANG; + String lang = null; Map params = Collections.emptyMap(); for (Iterator> itr = configMap.entrySet().iterator(); itr.hasNext();) { Map.Entry entry = itr.next(); @@ -126,7 +126,15 @@ private static Script parseScript(Object config) { } assert type != null : "if script is not null, type should definitely not be null"; - return new Script(type, lang, script, params); + if (type == ScriptType.STORED) { + if (lang != null) { + throw new IllegalArgumentException("lang cannot be specified for stored scripts"); + } + + return new Script(type, null, script, null, params); + } else { + return new Script(type, lang == null ? 
DEFAULT_SCRIPT_LANG : lang, script, params); + } } else { throw new IllegalArgumentException("Script value should be a String or a Map"); } diff --git a/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml index ea9fa33e6a9cf..6b2fb00dc160c 100644 --- a/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml +++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml @@ -340,3 +340,84 @@ source: if (ctx._source.user == "kimchy") {ctx.op = "update"} else {ctx.op = "junk"} - match: { error.reason: 'Operation type [junk] not allowed, only [noop, index, delete] are allowed' } + +--- +"Update all docs with one deletion and one noop using a stored script": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } + - do: + index: + index: twitter + type: tweet + id: 2 + body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } + - do: + index: + index: twitter + type: tweet + id: 3 + body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } + - do: + index: + index: twitter + type: tweet + id: 4 + body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } + - do: + indices.refresh: {} + - do: + put_script: + id: "my_update_script" + body: { "script": {"lang": "painless", + "source": "int choice = ctx._source.level % 3; + if (choice == 0) { + ctx._source.last_updated = '2016-01-02T00:00:00Z'; + } else if (choice == 1) { + ctx.op = 'noop'; + } else { + ctx.op = 'delete'; + }" } } + - match: { acknowledged: true } + + - do: + update_by_query: + refresh: true + index: twitter + body: + script: + id: "my_update_script" + + - match: {updated: 2} + - match: {deleted: 1} + - match: {noops: 1} + + - do: + search: + index: twitter + body: + query: 
+ match: + last_updated: "2016-01-02T00:00:00Z" + - match: { hits.total: 2 } + + - do: + search: + index: twitter + body: + query: + match: + last_updated: "2016-01-01T12:10:30Z" + - match: { hits.total: 1 } + + - do: + search: + index: twitter + body: + query: + term: + level: 11 + - match: { hits.total: 0 } From 41b54ccbd101db8b25426830d63fd4c753da1d40 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 15 Mar 2018 20:58:37 -0400 Subject: [PATCH 82/89] Clarify how to set compiler and runtime JDKs (#29101) This commit enhances the error messages reported when JAVA_HOME and RUNTIME_JAVA_HOME are not correctly set to point towards the minimum compiler and minimum runtime JDKs that are expected by the builds. The previous error message would say: Java 1.9 or above is required to build Elasticsearch which is confusing if the user does have a JDK 9 installation and is even the version that they have on their path yet they have JAVA_HOME pointing to another JDK installation. The error message reported after this change is: the environment variable JAVA_HOME must be set to a JDK installation directory for Java 1.9 but is [/usr/java/jdk-8] corresponding to [1.8] --- .../org/elasticsearch/gradle/BuildPlugin.groovy | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index b72d5696af720..6043ce210906a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -140,16 +140,22 @@ class BuildPlugin implements Plugin { final GradleVersion minGradle = GradleVersion.version('4.3') if (currentGradleVersion < minGradle) { - throw new GradleException("${minGradle} or above is required to build elasticsearch") + throw new GradleException("${minGradle} or above is required to build Elasticsearch") } // enforce Java 
version if (compilerJavaVersionEnum < minimumCompilerVersion) { - throw new GradleException("Java ${minimumCompilerVersion} or above is required to build Elasticsearch") + final String message = + "the environment variable JAVA_HOME must be set to a JDK installation directory for Java ${minimumCompilerVersion}" + + " but is [${compilerJavaHome}] corresponding to [${compilerJavaVersionEnum}]" + throw new GradleException(message) } if (runtimeJavaVersionEnum < minimumRuntimeVersion) { - throw new GradleException("Java ${minimumRuntimeVersion} or above is required to run Elasticsearch") + final String message = + "the environment variable RUNTIME_JAVA_HOME must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" + + " but is [${runtimeJavaHome}] corresponding to [${runtimeJavaVersionEnum}]" + throw new GradleException(message) } project.rootProject.ext.compilerJavaHome = compilerJavaHome From 5d7b0dc2ede4d8d0a9034cc301db437859aa1277 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 15 Mar 2018 21:35:40 -0400 Subject: [PATCH 83/89] Allow overriding JVM options in Windows service (#29044) Today we allow any other method of starting Elastisearch to override jvm.options via ES_JAVA_OPTS. Yet, for some settings in the Windows service, we do not allow this. This commit removes this in favor of being consistent with other packaging choices. --- .../src/bin/elasticsearch-service.bat | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/distribution/src/bin/elasticsearch-service.bat b/distribution/src/bin/elasticsearch-service.bat index 065725f8bdb72..e4f3e92b084c4 100644 --- a/distribution/src/bin/elasticsearch-service.bat +++ b/distribution/src/bin/elasticsearch-service.bat @@ -120,50 +120,26 @@ echo %ES_JAVA_OPTS% for %%a in ("%ES_JAVA_OPTS:;=","%") do ( set var=%%a if "!var:~1,4!" == "-Xms" ( - if not "!JVM_MS!" == "" ( - echo duplicate min heap size settings found - goto:eof - ) set XMS=!var:~5,-1! call:convertxm !XMS! 
JVM_MS ) if "!var:~1,16!" == "-XX:MinHeapSize=" ( - if not "!JVM_MS!" == "" ( - echo duplicate min heap size settings found - goto:eof - ) set XMS=!var:~17,-1! call:convertxm !XMS! JVM_MS ) if "!var:~1,4!" == "-Xmx" ( - if not "!JVM_MX!" == "" ( - echo duplicate max heap size settings found - goto:eof - ) set XMX=!var:~5,-1! call:convertxm !XMX! JVM_MX ) if "!var:~1,16!" == "-XX:MaxHeapSize=" ( - if not "!JVM_MX!" == "" ( - echo duplicate max heap size settings found - goto:eof - ) set XMX=!var:~17,-1! call:convertxm !XMX! JVM_MX ) if "!var:~1,4!" == "-Xss" ( - if not "!JVM_SS!" == "" ( - echo duplicate thread stack size settings found - exit 1 - ) set XSS=!var:~5,-1! call:convertxk !XSS! JVM_SS ) if "!var:~1,20!" == "-XX:ThreadStackSize=" ( - if not "!JVM_SS!" == "" ( - echo duplicate thread stack size settings found - goto:eof - ) set XSS=!var:~21,-1! call:convertxk !XSS! JVM_SS ) From bd54917043db8efee2a26309190cdffcf29242e1 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 15 Mar 2018 20:36:00 +0100 Subject: [PATCH 84/89] Added minimal docs for reindex api in java-api docs Additionally: * Included the existing update by query java api docs in java-api docs. (for some reason it was never included, it needed some tweaking and then it was good to go) * moved delete-by-query / update-by-query code samples to java file so that we can verify that these samples at least compile. 
Closes #24203 --- docs/java-api/docs.asciidoc | 8 +- docs/java-api/docs/delete.asciidoc | 26 +-- docs/java-api/docs/reindex.asciidoc | 11 + docs/java-api/docs/update-by-query.asciidoc | 131 ++++-------- docs/java-api/index.asciidoc | 2 + .../documentation/ReindexDocumentationIT.java | 194 ++++++++++++++++++ 6 files changed, 255 insertions(+), 117 deletions(-) create mode 100644 docs/java-api/docs/reindex.asciidoc create mode 100644 modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java diff --git a/docs/java-api/docs.asciidoc b/docs/java-api/docs.asciidoc index c355714bdd636..181c5d8e0bd99 100644 --- a/docs/java-api/docs.asciidoc +++ b/docs/java-api/docs.asciidoc @@ -7,12 +7,14 @@ This section describes the following CRUD APIs: * <> * <> * <> -* <> * <> .Multi-document APIs * <> * <> +* <> +* <> +* <> NOTE: All CRUD APIs are single-index APIs. The `index` parameter accepts a single index name, or an `alias` which points to a single index. @@ -28,3 +30,7 @@ include::docs/update.asciidoc[] include::docs/multi-get.asciidoc[] include::docs/bulk.asciidoc[] + +include::docs/update-by-query.asciidoc[] + +include::docs/reindex.asciidoc[] \ No newline at end of file diff --git a/docs/java-api/docs/delete.asciidoc b/docs/java-api/docs/delete.asciidoc index 218ea14553b4c..9572c32c3a5d5 100644 --- a/docs/java-api/docs/delete.asciidoc +++ b/docs/java-api/docs/delete.asciidoc @@ -20,15 +20,9 @@ For more information on the delete operation, check out the The delete by query API allows one to delete a given set of documents based on the result of a query: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -BulkByScrollResponse response = - DeleteByQueryAction.INSTANCE.newRequestBuilder(client) - .filter(QueryBuilders.matchQuery("gender", "male")) <1> - .source("persons") <2> - .get(); <3> - -long deleted = response.getDeleted(); <4> 
+include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[delete-by-query-sync] -------------------------------------------------- <1> query <2> index @@ -38,21 +32,9 @@ long deleted = response.getDeleted(); <4> As it can be a long running operation, if you wish to do it asynchronously, you can call `execute` instead of `get` and provide a listener like: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -DeleteByQueryAction.INSTANCE.newRequestBuilder(client) - .filter(QueryBuilders.matchQuery("gender", "male")) <1> - .source("persons") <2> - .execute(new ActionListener() { <3> - @Override - public void onResponse(BulkByScrollResponse response) { - long deleted = response.getDeleted(); <4> - } - @Override - public void onFailure(Exception e) { - // Handle the exception - } - }); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[delete-by-query-async] -------------------------------------------------- <1> query <2> index diff --git a/docs/java-api/docs/reindex.asciidoc b/docs/java-api/docs/reindex.asciidoc new file mode 100644 index 0000000000000..842e763f74d71 --- /dev/null +++ b/docs/java-api/docs/reindex.asciidoc @@ -0,0 +1,11 @@ +[[java-docs-reindex]] +=== Reindex API + +See {ref}/docs-reindex.html[reindex API]. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[reindex1] +-------------------------------------------------- +<1> Optionally a query can provided to filter what documents should be + re-indexed from the source to the target index. 
diff --git a/docs/java-api/docs/update-by-query.asciidoc b/docs/java-api/docs/update-by-query.asciidoc index 8b3d2d71c400a..ae4f8d72ee1d9 100644 --- a/docs/java-api/docs/update-by-query.asciidoc +++ b/docs/java-api/docs/update-by-query.asciidoc @@ -1,18 +1,13 @@ -[[docs-update-by-query]] -== Update By Query API +[[java-docs-update-by-query]] +=== Update By Query API The simplest usage of `updateByQuery` updates each document in an index without changing the source. This usage enables -<> or another online -mapping change. +picking up a new property or another online mapping change. -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index").abortOnVersionConflict(false); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query] -------------------------------------------------- Calls to the `updateByQuery` API start by getting a snapshot of the index, indexing @@ -41,78 +36,50 @@ The `UpdateByQueryRequestBuilder` API supports filtering the updated documents, limiting the total number of documents to update, and updating documents with a script: -[source,java] --------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); -updateByQuery.source("source_index") - .filter(termQuery("level", "awesome")) - .size(1000) - .script(new Script("ctx._source.awesome = 'absolutely'", ScriptType.INLINE, "painless", emptyMap())); - -BulkByScrollResponse response = updateByQuery.get(); +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-filter] 
-------------------------------------------------- `UpdateByQueryRequestBuilder` also enables direct access to the query used to select the documents. You can use this access to change the default scroll size or otherwise modify the request for matching documents. -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index") - .source().setSize(500); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-size] -------------------------------------------------- You can also combine `size` with sorting to limit the documents updated: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index").size(100) - .source().addSort("cat", SortOrder.DESC); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-sort] -------------------------------------------------- In addition to changing the `_source` field for the document, you can use a script to change the action, similar to the Update API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("source_index") - .script(new Script( - "if (ctx._source.awesome == 'absolutely) {" - + " ctx.op='noop' - + "} else if (ctx._source.awesome == 'lame') {" - + " ctx.op='delete'" - + "} else {" - + "ctx._source.awesome = 'absolutely'}", ScriptType.INLINE, "painless", emptyMap())); - 
-BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-script] -------------------------------------------------- -As in the <>, you can set the value of `ctx.op` to change the +As in the <>, you can set the value of `ctx.op` to change the operation that executes: `noop`:: Set `ctx.op = "noop"` if your script doesn't make any changes. The `updateByQuery` operaton then omits that document from the updates. -This behavior increments the `noop` counter in the -<>. +This behavior increments the `noop` counter in the response body. `delete`:: Set `ctx.op = "delete"` if your script decides that the document must be deleted. The deletion will be reported in the `deleted` counter in the -<>. +response body. Setting `ctx.op` to any other value generates an error. Setting any other field in `ctx` generates an error. @@ -123,79 +90,55 @@ from its original location. You can also perform these operations on multiple indices and types at once, similar to the search API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source("foo", "bar").source().setTypes("a", "b"); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-multi-index] -------------------------------------------------- If you provide a `routing` value then the process copies the routing value to the scroll query, limiting the process to the shards that match that routing value: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.source().setRouting("cat"); - -BulkByScrollResponse 
response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-routing] -------------------------------------------------- -`updateByQuery` can also use the <> feature by +`updateByQuery` can also use the ingest node by specifying a `pipeline` like this: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); - -updateByQuery.setPipeline("hurray"); - -BulkByScrollResponse response = updateByQuery.get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-pipeline] -------------------------------------------------- [float] -[[docs-update-by-query-task-api]] +[[java-docs-update-by-query-task-api]] === Works with the Task API -You can fetch the status of all running update-by-query requests with the -<>: +You can fetch the status of all running update-by-query requests with the Task API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -ListTasksResponse tasksList = client.admin().cluster().prepareListTasks() - .setActions(UpdateByQueryAction.NAME).setDetailed(true).get(); - -for (TaskInfo info: tasksList.getTasks()) { - TaskId taskId = info.getTaskId(); - BulkByScrollTask.Status status = (BulkByScrollTask.Status) info.getStatus(); - // do stuff -} - +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-list-tasks] -------------------------------------------------- With the `TaskId` shown above you can look up the task directly: // provide API Example -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -GetTaskResponse get = client.admin().cluster().prepareGetTask(taskId).get(); 
+include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-get-task] -------------------------------------------------- [float] -[[docs-update-by-query-cancel-task-api]] +[[java-docs-update-by-query-cancel-task-api]] === Works with the Cancel Task API -Any Update By Query can be canceled using the <>: +Any Update By Query can be canceled using the Task Cancel API: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -// Cancel all update-by-query requests -client.admin().cluster().prepareCancelTasks().setActions(UpdateByQueryAction.NAME).get().getTasks() -// Cancel a specific update-by-query request -client.admin().cluster().prepareCancelTasks().setTaskId(taskId).get().getTasks() +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-cancel-task] -------------------------------------------------- Use the `list tasks` API to find the value of `taskId`. @@ -204,14 +147,14 @@ Cancelling a request is typically a very fast process but can take up to a few s The task status API continues to list the task until the cancellation is complete. [float] -[[docs-update-by-query-rethrottle]] +[[java-docs-update-by-query-rethrottle]] === Rethrottling Use the `_rethrottle` API to change the value of `requests_per_second` on a running update: -[source,java] +["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -RethrottleAction.INSTANCE.newRequestBuilder(client).setTaskId(taskId).setRequestsPerSecond(2.0f).get(); +include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-rethrottle] -------------------------------------------------- Use the `list tasks` API to find the value of `taskId`. 
diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index c7d582ca98441..fb83f063272f7 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -152,6 +152,8 @@ and add it as a dependency. As an example, we will use the `slf4j-simple` logger :client-tests: {docdir}/../../server/src/test/java/org/elasticsearch/client/documentation +:client-reindex-tests: {docdir}/../../modules/reindex/src/test/java/org/elasticsearch/client/documentation + include::client.asciidoc[] include::docs.asciidoc[] diff --git a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java new file mode 100644 index 0000000000000..1f99f062d25af --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -0,0 +1,194 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.BulkByScrollTask; +import org.elasticsearch.index.reindex.DeleteByQueryAction; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.index.reindex.ReindexRequestBuilder; +import org.elasticsearch.index.reindex.RethrottleAction; +import org.elasticsearch.index.reindex.UpdateByQueryAction; +import org.elasticsearch.index.reindex.UpdateByQueryRequestBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Collections; + +public class ReindexDocumentationIT extends ESIntegTestCase { + + public void reindex() { + Client client = client(); + // tag::reindex1 + BulkByScrollResponse response = ReindexAction.INSTANCE.newRequestBuilder(client) + .destination("target_index") + .filter(QueryBuilders.matchQuery("category", "xzy")) // <1> + .get(); + // end::reindex1 + } + + public void updateByQuery() { + Client client = client(); + { + // tag::update-by-query + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index").abortOnVersionConflict(false); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query + } + { + // tag::update-by-query-filter + UpdateByQueryRequestBuilder updateByQuery = 
UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index") + .filter(QueryBuilders.termQuery("level", "awesome")) + .size(1000) + .script(new Script(ScriptType.INLINE, "ctx._source.awesome = 'absolutely'", "painless", Collections.emptyMap())); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-filter + } + { + // tag::update-by-query-size + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index") + .source().setSize(500); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-size + } + { + // tag::update-by-query-sort + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index").size(100) + .source().addSort("cat", SortOrder.DESC); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-sort + } + { + // tag::update-by-query-script + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("source_index") + .script(new Script( + ScriptType.INLINE, + "if (ctx._source.awesome == 'absolutely) {" + + " ctx.op='noop'" + + "} else if (ctx._source.awesome == 'lame') {" + + " ctx.op='delete'" + + "} else {" + + "ctx._source.awesome = 'absolutely'}", + "painless", + Collections.emptyMap())); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-script + } + { + // tag::update-by-query-multi-index + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.source("foo", "bar").source().setTypes("a", "b"); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-multi-index + } + { + // tag::update-by-query-routing + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + 
updateByQuery.source().setRouting("cat"); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-routing + } + { + // tag::update-by-query-pipeline + UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + updateByQuery.setPipeline("hurray"); + BulkByScrollResponse response = updateByQuery.get(); + // end::update-by-query-pipeline + } + { + // tag::update-by-query-list-tasks + ListTasksResponse tasksList = client.admin().cluster().prepareListTasks() + .setActions(UpdateByQueryAction.NAME).setDetailed(true).get(); + for (TaskInfo info: tasksList.getTasks()) { + TaskId taskId = info.getTaskId(); + BulkByScrollTask.Status status = (BulkByScrollTask.Status) info.getStatus(); + // do stuff + } + // end::update-by-query-list-tasks + } + { + TaskId taskId = null; + // tag::update-by-query-get-task + GetTaskResponse get = client.admin().cluster().prepareGetTask(taskId).get(); + // end::update-by-query-get-task + } + { + TaskId taskId = null; + // tag::update-by-query-cancel-task + // Cancel all update-by-query requests + client.admin().cluster().prepareCancelTasks().setActions(UpdateByQueryAction.NAME).get().getTasks(); + // Cancel a specific update-by-query request + client.admin().cluster().prepareCancelTasks().setTaskId(taskId).get().getTasks(); + // end::update-by-query-cancel-task + } + { + TaskId taskId = null; + // tag::update-by-query-rethrottle + RethrottleAction.INSTANCE.newRequestBuilder(client) + .setTaskId(taskId) + .setRequestsPerSecond(2.0f) + .get(); + // end::update-by-query-rethrottle + } + } + + public void deleteByQuery() { + Client client = client(); + // tag::delete-by-query-sync + BulkByScrollResponse response = DeleteByQueryAction.INSTANCE.newRequestBuilder(client) + .filter(QueryBuilders.matchQuery("gender", "male")) // <1> + .source("persons") // <2> + .get(); // <3> + long deleted = response.getDeleted(); // <4> + // end::delete-by-query-sync + + // tag::delete-by-query-async 
+ DeleteByQueryAction.INSTANCE.newRequestBuilder(client) + .filter(QueryBuilders.matchQuery("gender", "male")) // <1> + .source("persons") // <2> + .execute(new ActionListener() { // <3> + @Override + public void onResponse(BulkByScrollResponse response) { + long deleted = response.getDeleted(); // <4> + } + @Override + public void onFailure(Exception e) { + // Handle the exception + } + }); + // end::delete-by-query-async + } + +} From 3120049fe8a1261e2b45918959015b1ebe4f54a8 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 16 Mar 2018 10:20:56 +0100 Subject: [PATCH 85/89] Use removeTask instead of finishTask in PersistentTasksClusterService (#29055) The method `PersistentTasksClusterService.finishTask()` has been modified since it was added and does not use any `removeOncompletion` flag anymore. Its behavior is now similar to `removeTask()` and can be replaced by this one. When a non existing task is removed, the cluster state update task will fail and its `source` will still indicate `finish persistent task`/`remove persistent task`. 
--- .../PersistentTasksClusterService.java | 2 +- .../PersistentTasksCustomMetaData.java | 22 +++---------------- .../PersistentTasksCustomMetaDataTests.java | 11 +--------- 3 files changed, 5 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 24d8c5f7be31a..7c395365c1b88 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -117,7 +117,7 @@ public void completePersistentTask(String id, long allocationId, Exception failu public ClusterState execute(ClusterState currentState) throws Exception { PersistentTasksCustomMetaData.Builder tasksInProgress = builder(currentState); if (tasksInProgress.hasTask(id, allocationId)) { - tasksInProgress.finishTask(id); + tasksInProgress.removeTask(id); return update(currentState, tasksInProgress); } else { if (tasksInProgress.hasTask(id)) { diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java index 25b3567ac395d..ee45eb8ffad28 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java @@ -609,7 +609,7 @@ public Builder reassignTask(String taskId, Assignment assignment) { changed = true; tasks.put(taskId, new PersistentTask<>(taskInProgress, getNextAllocationId(), assignment)); } else { - throw new ResourceNotFoundException("cannot reassign task with id {" + taskId + "}, the task no longer exits"); + throw new ResourceNotFoundException("cannot reassign task with id {" + taskId + "}, the task no longer exists"); } return this; } @@ -623,7 +623,7 @@ public Builder 
updateTaskStatus(String taskId, Status status) { changed = true; tasks.put(taskId, new PersistentTask<>(taskInProgress, status)); } else { - throw new ResourceNotFoundException("cannot update task with id {" + taskId + "}, the task no longer exits"); + throw new ResourceNotFoundException("cannot update task with id {" + taskId + "}, the task no longer exists"); } return this; } @@ -635,23 +635,7 @@ public Builder removeTask(String taskId) { if (tasks.remove(taskId) != null) { changed = true; } else { - throw new ResourceNotFoundException("cannot remove task with id {" + taskId + "}, the task no longer exits"); - } - return this; - } - - /** - * Finishes the task - *

- * If the task is marked with removeOnCompletion flag, it is removed from the list, otherwise it is stopped. - */ - public Builder finishTask(String taskId) { - PersistentTask taskInProgress = tasks.get(taskId); - if (taskInProgress != null) { - changed = true; - tasks.remove(taskId); - } else { - throw new ResourceNotFoundException("cannot finish task with id {" + taskId + "}, the task no longer exits"); + throw new ResourceNotFoundException("cannot remove task with id {" + taskId + "}, the task no longer exists"); } return this; } diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java index 537fc21ed433f..67962b800d2cf 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java @@ -191,7 +191,7 @@ public void testBuilder() { } boolean changed = false; for (int j = 0; j < randomIntBetween(1, 10); j++) { - switch (randomInt(4)) { + switch (randomInt(3)) { case 0: lastKnownTask = addRandomTask(builder); changed = true; @@ -223,15 +223,6 @@ public void testBuilder() { expectThrows(ResourceNotFoundException.class, () -> builder.removeTask(fLastKnownTask)); } break; - case 4: - if (builder.hasTask(lastKnownTask)) { - changed = true; - builder.finishTask(lastKnownTask); - } else { - String fLastKnownTask = lastKnownTask; - expectThrows(ResourceNotFoundException.class, () -> builder.finishTask(fLastKnownTask)); - } - break; } } assertEquals(changed, builder.isChanged()); From 515b6440aeb1cbd8cf3f390fb72b8aa5b030f4f9 Mon Sep 17 00:00:00 2001 From: Martijn Laarman Date: Fri, 16 Mar 2018 12:28:24 +0100 Subject: [PATCH 86/89] Fix starting on Windows from another drive (#29086) The cd command on Windows has an oddity regarding changing directories. 
If the drive of the current directory is a different drive than that of the directory that was passed to the cd command, cd acts in query mode and does not change the current directory. Instead, a flag is needed to put the cd command into set mode so that the directory actually changes. This causes a problem when starting Elasticsearch from a directory different than the one where it is installed and this commit fixes the issue. --- distribution/src/bin/elasticsearch.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/bin/elasticsearch.bat b/distribution/src/bin/elasticsearch.bat index f9f668fc61538..e0f52c54c627f 100644 --- a/distribution/src/bin/elasticsearch.bat +++ b/distribution/src/bin/elasticsearch.bat @@ -50,7 +50,7 @@ if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" ( exit /b 1 ) -cd "%ES_HOME%" +cd /d "%ES_HOME%" %JAVA% %ES_JAVA_OPTS% -Delasticsearch -Des.path.home="%ES_HOME%" -Des.path.conf="%ES_PATH_CONF%" -cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" !newparams! endlocal From 115c1fba9e038af6beb9c857f27731e4c994a1dc Mon Sep 17 00:00:00 2001 From: Jiri Tyr Date: Fri, 16 Mar 2018 13:13:17 +0000 Subject: [PATCH 87/89] [Docs] Fix link to Grok patterns (#29088) --- docs/reference/ingest/ingest-node.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 83fbbdce5371b..58964a50dac86 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1216,8 +1216,8 @@ expression that supports aliased expressions that can be reused. This tool is perfect for syslog logs, apache and other webserver logs, mysql logs, and in general, any log format that is generally written for humans and not computer consumption. 
-This processor comes packaged with over -https://github.com/elastic/elasticsearch/tree/master/modules/ingest-common/src/main/resources/patterns[120 reusable patterns]. +This processor comes packaged with many +https://github.com/elastic/elasticsearch/blob/{branch}/libs/grok/src/main/resources/patterns[reusable patterns]. If you need help building patterns to match your logs, you will find the {kibana-ref}/xpack-grokdebugger.html[Grok Debugger] tool quite useful! The Grok Debugger is an {xpack} feature under the Basic License and is therefore *free to use*. The Grok Constructor at is also a useful tool. From 073c7ab3ed6139fad3132669a761e7e5b2499812 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 16 Mar 2018 14:34:33 +0100 Subject: [PATCH 88/89] Clarify that dates are always rendered as strings. (#29093) Even in the case that the date was originally supplied as a long in the JSON document. Closes #26504 --- docs/reference/mapping/types/date.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index bdce1d6c46fce..1beb64083b8c8 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -10,6 +10,13 @@ JSON doesn't have a date datatype, so dates in Elasticsearch can either be: Internally, dates are converted to UTC (if the time-zone is specified) and stored as a long number representing milliseconds-since-the-epoch. +Queries on dates are internally converted to range queries on this long +representation, and the result of aggregations and stored fields is converted +back to a string depending on the date format that is associated with the field. + +NOTE: Dates will always be rendered as strings, even if they were initially +supplied as a long in the JSON document. 
+ Date formats can be customised, but if no `format` is specified then it uses the default: From eb7b46b01dfbc52f8000cb0c250164ce55e5bc22 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 16 Mar 2018 14:39:36 +0100 Subject: [PATCH 89/89] Clarify requirements of strict date formats. (#29090) Closes #29014 --- docs/reference/mapping/params/format.asciidoc | 18 ++++++++++++------ .../time/format/StrictISODateTimeFormat.java | 2 +- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/reference/mapping/params/format.asciidoc b/docs/reference/mapping/params/format.asciidoc index 85cad16cb5390..7d621f875224e 100644 --- a/docs/reference/mapping/params/format.asciidoc +++ b/docs/reference/mapping/params/format.asciidoc @@ -44,12 +44,18 @@ http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[i [[built-in-date-formats]] ==== Built In Formats -Most of the below dates have a `strict` companion dates, which means, that -year, month and day parts of the week must have prepending zeros in order -to be valid. This means, that a date like `5/11/1` would not be valid, but -you would need to specify the full date, which would be `2005/11/01` in this -example. So instead of `date_optional_time` you would need to specify -`strict_date_optional_time`. +Most of the below formats have a `strict` companion format, which means that +year, month and day parts of the week must use respectively 4, 2 and 2 digits +exactly, potentially prepending zeros. For instance a date like `5/11/1` would +be considered invalid and would need to be rewritten to `2005/11/01` to be +accepted by the date parser. + +To use them, you need to prepend `strict_` to the name of the date format, for +instance `strict_date_optional_time` instead of `date_optional_time`. + +These strict date formats are especially useful when +<> in order to make sure to +not accidentally map irrelevant strings as dates. 
The following tables lists all the defaults ISO formats supported: diff --git a/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java b/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java index 3692e9e0ad261..d7a05e344d0e8 100644 --- a/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java +++ b/server/src/main/java/org/joda/time/format/StrictISODateTimeFormat.java @@ -27,7 +27,7 @@ * class is named ISODatetimeFormat * * However there has been done one huge modification in several methods, which forces the date - * year to be at least n digits, so that a year like "5" is invalid and must be "0005" + * year to be exactly n digits, so that a year like "5" is invalid and must be "0005" * * All methods have been marked with an "// ES change" commentary *