diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
index 5c51aa17eec47..be2cb89162a9b 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
@@ -24,6 +24,8 @@
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -1182,4 +1184,32 @@ public void getTemplateAsync(GetIndexTemplatesRequest getIndexTemplatesRequest,
restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
options, GetIndexTemplatesResponse::fromXContent, listener, emptySet());
}
+
+ /**
+ * Calls the analyze API
+ *
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html">Analyze API on elastic.co</a>
+ *
+ * @param request the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ */
+ public AnalyzeResponse analyze(AnalyzeRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::analyze, options,
+ AnalyzeResponse::fromXContent, emptySet());
+ }
+
+ /**
+ * Asynchronously calls the analyze API
+ *
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html">Analyze API on elastic.co</a>
+ *
+ * @param request the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener the listener to be notified upon request completion
+ */
+ public void analyzeAsync(AnalyzeRequest request, RequestOptions options,
+ ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::analyze, options,
+ AnalyzeResponse::fromXContent, listener, emptySet());
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 25667d4f1e22a..dbf5851e39507 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -44,6 +44,7 @@
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -1009,6 +1010,18 @@ static Request getAlias(GetAliasesRequest getAliasesRequest) {
return request;
}
+ static Request analyze(AnalyzeRequest request) throws IOException {
+ EndpointBuilder builder = new EndpointBuilder();
+ String index = request.index();
+ if (index != null) {
+ builder.addPathPart(index);
+ }
+ builder.addPathPartAsIs("_analyze");
+ Request req = new Request(HttpGet.METHOD_NAME, builder.build());
+ req.setEntity(createEntity(request, REQUEST_BODY_CONTENT_TYPE));
+ return req;
+ }
+
static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
index ba910f91dc855..f94f8776ff1a2 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
@@ -29,6 +29,8 @@
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -1320,4 +1322,20 @@ public void testGetIndexTemplate() throws Exception {
new GetIndexTemplatesRequest().names("the-template-*"), client.indices()::getTemplate, client.indices()::getTemplateAsync));
assertThat(notFound.status(), equalTo(RestStatus.NOT_FOUND));
}
+
+ public void testAnalyze() throws Exception {
+
+ RestHighLevelClient client = highLevelClient();
+
+ AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english");
+ AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync);
+
+ assertThat(noindexResponse.getTokens(), hasSize(3));
+
+ AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true);
+ AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync);
+
+ assertNotNull(detailsResponse.detail());
+
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index 368e246588c42..fc34fafc212d4 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -46,6 +46,7 @@
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -2219,6 +2220,22 @@ public void testGetTemplateRequest() throws Exception {
assertThat(request.getEntity(), nullValue());
}
+ public void testAnalyzeRequest() throws Exception {
+ AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
+ .text("Here is some text")
+ .index("test_index")
+ .analyzer("test_analyzer");
+
+ Request request = RequestConverters.analyze(indexAnalyzeRequest);
+ assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
+ assertToXContentBody(indexAnalyzeRequest, request.getEntity());
+
+ AnalyzeRequest analyzeRequest = new AnalyzeRequest()
+ .text("more text")
+ .analyzer("test_analyzer");
+ assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
+ }
+
public void testGetScriptRequest() {
GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script");
Map expectedParams = new HashMap<>();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
index d5bc5f96395a4..4fbee55c104c5 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
@@ -27,6 +27,9 @@
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
+import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -2315,4 +2318,127 @@ public void onFailure(Exception e) {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
+
+ public void testAnalyze() throws IOException, InterruptedException {
+
+ RestHighLevelClient client = highLevelClient();
+
+ {
+ // tag::analyze-builtin-request
+ AnalyzeRequest request = new AnalyzeRequest();
+ request.text("Some text to analyze", "Some more text to analyze"); // <1>
+ request.analyzer("english"); // <2>
+ // end::analyze-builtin-request
+ }
+
+ {
+ // tag::analyze-custom-request
+ AnalyzeRequest request = new AnalyzeRequest();
+ request.text("Some text to analyze");
+ request.addCharFilter("html_strip"); // <1>
+ request.tokenizer("standard"); // <2>
+ request.addTokenFilter("lowercase"); // <3>
+
+ Map stopFilter = new HashMap<>();
+ stopFilter.put("type", "stop");
+ stopFilter.put("stopwords", new String[]{ "to" }); // <4>
+ request.addTokenFilter(stopFilter); // <5>
+ // end::analyze-custom-request
+ }
+
+ {
+ // tag::analyze-custom-normalizer-request
+ AnalyzeRequest request = new AnalyzeRequest();
+ request.text("BaR");
+ request.addTokenFilter("lowercase");
+ // end::analyze-custom-normalizer-request
+
+ // tag::analyze-request-explain
+ request.explain(true); // <1>
+ request.attributes("keyword", "type"); // <2>
+ // end::analyze-request-explain
+
+ // tag::analyze-request-sync
+ AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
+ // end::analyze-request-sync
+
+ // tag::analyze-response-tokens
+ List tokens = response.getTokens(); // <1>
+ // end::analyze-response-tokens
+ // tag::analyze-response-detail
+ DetailAnalyzeResponse detail = response.detail(); // <1>
+ // end::analyze-response-detail
+
+ assertNull(tokens);
+ assertNotNull(detail.tokenizer());
+ }
+
+ CreateIndexRequest req = new CreateIndexRequest("my_index");
+ CreateIndexResponse resp = client.indices().create(req, RequestOptions.DEFAULT);
+ assertTrue(resp.isAcknowledged());
+
+ PutMappingRequest pmReq = new PutMappingRequest()
+ .indices("my_index")
+ .type("_doc")
+ .source("my_field", "type=text,analyzer=english");
+ PutMappingResponse pmResp = client.indices().putMapping(pmReq, RequestOptions.DEFAULT);
+ assertTrue(pmResp.isAcknowledged());
+
+ {
+ // tag::analyze-index-request
+ AnalyzeRequest request = new AnalyzeRequest();
+ request.index("my_index"); // <1>
+ request.analyzer("my_analyzer"); // <2>
+ request.text("some text to analyze");
+ // end::analyze-index-request
+
+ // tag::analyze-execute-listener
+ ActionListener listener = new ActionListener() {
+ @Override
+ public void onResponse(AnalyzeResponse analyzeTokens) {
+
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+
+ }
+ };
+ // end::analyze-execute-listener
+
+ // use a built-in analyzer in the test
+ request = new AnalyzeRequest();
+ request.index("my_index");
+ request.field("my_field");
+ request.text("some text to analyze");
+ // Use a blocking listener in the test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::analyze-request-async
+ client.indices().analyzeAsync(request, RequestOptions.DEFAULT, listener);
+ // end::analyze-request-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+
+ {
+ // tag::analyze-index-normalizer-request
+ AnalyzeRequest request = new AnalyzeRequest();
+ request.index("my_index"); // <1>
+ request.normalizer("my_normalizer"); // <2>
+ request.text("some text to analyze");
+ // end::analyze-index-normalizer-request
+ }
+
+ {
+ // tag::analyze-field-request
+ AnalyzeRequest request = new AnalyzeRequest();
+ request.index("my_index");
+ request.field("my_field");
+ request.text("some text to analyze");
+ // end::analyze-field-request
+ }
+
+ }
}
diff --git a/docs/java-rest/high-level/indices/analyze.asciidoc b/docs/java-rest/high-level/indices/analyze.asciidoc
new file mode 100644
index 0000000000000..4bffe2f020382
--- /dev/null
+++ b/docs/java-rest/high-level/indices/analyze.asciidoc
@@ -0,0 +1,119 @@
+[[java-rest-high-analyze]]
+=== Analyze API
+
+[[java-rest-high-analyze-request]]
+==== Analyze Request
+
+An `AnalyzeRequest` contains the text to analyze, and one of several options to
+specify how the analysis should be performed.
+
+The simplest version uses a built-in analyzer:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-builtin-request]
+---------------------------------------------------
+<1> The text to analyze. Multiple strings are treated as a multi-valued field
+<2> A built-in analyzer
+
+You can configure a custom analyzer:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-request]
+---------------------------------------------------
+<1> Configure char filters
+<2> Configure the tokenizer
+<3> Add a built-in tokenfilter
+<4> Configuration for a custom tokenfilter
+<5> Add the custom tokenfilter
+
+You can also build a custom normalizer, by including only charfilters and
+tokenfilters:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-normalizer-request]
+---------------------------------------------------
+
+You can analyze text using an analyzer defined in an existing index:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-request]
+---------------------------------------------------
+<1> The index containing the mappings
+<2> The analyzer defined on this index to use
+
+Or you can use a normalizer:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-normalizer-request]
+---------------------------------------------------
+<1> The index containing the mappings
+<2> The normalizer defined on this index to use
+
+You can analyze text using the mappings for a particular field in an index:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-field-request]
+---------------------------------------------------
+
+==== Optional arguments
+The following arguments can also optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-explain]
+---------------------------------------------------
+<1> Setting `explain` to true will add further details to the response
+<2> Setting `attributes` allows you to return only token attributes that you are
+interested in
+
+[[java-rest-high-analyze-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-sync]
+---------------------------------------------------
+
+[[java-rest-high-analyze-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of an analyze request requires both the `AnalyzeRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-async]
+---------------------------------------------------
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method if the
+execution successfully completed or using the `onFailure` method if it failed.
+
+A typical listener for `AnalyzeResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-execute-listener]
+---------------------------------------------------
+
+[[java-rest-high-analyze-response]]
+==== Analyze Response
+
+The returned `AnalyzeResponse` allows you to retrieve details of the analysis as
+follows:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-tokens]
+---------------------------------------------------
+<1> `AnalyzeToken` holds information about the individual tokens produced by analysis
+
+If `explain` was set to `true`, then information is instead returned from the `detail()`
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-detail]
+---------------------------------------------------
+<1> `DetailAnalyzeResponse` holds more detailed information about tokens produced by
+the various substeps in the analysis chain.
\ No newline at end of file
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index d1ccddf2429c5..9a7d28512bb28 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -88,6 +88,7 @@ Alias Management::
* <>
* <>
+include::indices/analyze.asciidoc[]
include::indices/create_index.asciidoc[]
include::indices/delete_index.asciidoc[]
include::indices/indices_exists.asciidoc[]
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java
index d9c018848d7e8..09686025e9da9 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java
@@ -26,6 +26,8 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@@ -42,7 +44,7 @@
* A request to analyze a text associated with a specific index. Allow to provide
* the actual analyzer name to perform the analysis with.
*/
-public class AnalyzeRequest extends SingleShardRequest {
+public class AnalyzeRequest extends SingleShardRequest implements ToXContentObject {
private String[] text;
@@ -62,7 +64,7 @@ public class AnalyzeRequest extends SingleShardRequest {
private String normalizer;
- public static class NameOrDefinition implements Writeable {
+ public static class NameOrDefinition implements Writeable, ToXContentFragment {
// exactly one of these two members is not null
public final String name;
public final Settings definition;
@@ -102,6 +104,15 @@ public void writeTo(StreamOutput out) throws IOException {
Settings.writeSettingsToStream(definition, out);
}
}
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ if (definition == null) {
+ return builder.value(name);
+ }
+ return definition.toXContent(builder, params);
+ }
+
}
public AnalyzeRequest() {
@@ -171,6 +182,7 @@ public AnalyzeRequest addCharFilter(String charFilter) {
this.charFilters.add(new NameOrDefinition(charFilter));
return this;
}
+
public List charFilters() {
return this.charFilters;
}
@@ -260,4 +272,36 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(normalizer);
}
}
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field("text", text);
+ if (Strings.isNullOrEmpty(analyzer) == false) {
+ builder.field("analyzer", analyzer);
+ }
+ if (tokenizer != null) {
+ tokenizer.toXContent(builder, params);
+ }
+ if (tokenFilters.size() > 0) {
+ builder.field("filter", tokenFilters);
+ }
+ if (charFilters.size() > 0) {
+ builder.field("char_filter", charFilters);
+ }
+ if (Strings.isNullOrEmpty(field) == false) {
+ builder.field("field", field);
+ }
+ if (explain) {
+ builder.field("explain", true);
+ }
+ if (attributes.length > 0) {
+ builder.field("attributes", attributes);
+ }
+ if (Strings.isNullOrEmpty(normalizer) == false) {
+ builder.field("normalizer", normalizer);
+ }
+ return builder.endObject();
+ }
+
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java
index 1e54def2385f8..d45ab2682a5ec 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java
@@ -20,17 +20,27 @@
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
+import java.util.TreeMap;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
public class AnalyzeResponse extends ActionResponse implements Iterable, ToXContentObject {
@@ -46,6 +56,25 @@ public static class AnalyzeToken implements Streamable, ToXContentObject {
AnalyzeToken() {
}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AnalyzeToken that = (AnalyzeToken) o;
+ return startOffset == that.startOffset &&
+ endOffset == that.endOffset &&
+ position == that.position &&
+ positionLength == that.positionLength &&
+ Objects.equals(term, that.term) &&
+ Objects.equals(attributes, that.attributes) &&
+ Objects.equals(type, that.type);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type);
+ }
+
public AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength,
String type, Map attributes) {
this.term = term;
@@ -97,7 +126,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(Fields.POSITION_LENGTH, positionLength);
}
if (attributes != null && !attributes.isEmpty()) {
- for (Map.Entry entity : attributes.entrySet()) {
+ Map sortedAttributes = new TreeMap<>(attributes);
+ for (Map.Entry entity : sortedAttributes.entrySet()) {
builder.field(entity.getKey(), entity.getValue());
}
}
@@ -111,6 +141,50 @@ public static AnalyzeToken readAnalyzeToken(StreamInput in) throws IOException {
return analyzeToken;
}
+ public static AnalyzeToken fromXContent(XContentParser parser) throws IOException {
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ String field = null;
+ String term = "";
+ int position = -1;
+ int startOffset = -1;
+ int endOffset = -1;
+ int positionLength = 1;
+ String type = "";
+ Map attributes = new HashMap<>();
+ for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) {
+ if (t == XContentParser.Token.FIELD_NAME) {
+ field = parser.currentName();
+ continue;
+ }
+ if (Fields.TOKEN.equals(field)) {
+ term = parser.text();
+ } else if (Fields.POSITION.equals(field)) {
+ position = parser.intValue();
+ } else if (Fields.START_OFFSET.equals(field)) {
+ startOffset = parser.intValue();
+ } else if (Fields.END_OFFSET.equals(field)) {
+ endOffset = parser.intValue();
+ } else if (Fields.POSITION_LENGTH.equals(field)) {
+ positionLength = parser.intValue();
+ } else if (Fields.TYPE.equals(field)) {
+ type = parser.text();
+ } else {
+ if (t == XContentParser.Token.VALUE_STRING) {
+ attributes.put(field, parser.text());
+ } else if (t == XContentParser.Token.VALUE_NUMBER) {
+ attributes.put(field, parser.numberValue());
+ } else if (t == XContentParser.Token.VALUE_BOOLEAN) {
+ attributes.put(field, parser.booleanValue());
+ } else if (t == XContentParser.Token.START_OBJECT) {
+ attributes.put(field, parser.map());
+ } else if (t == XContentParser.Token.START_ARRAY) {
+ attributes.put(field, parser.list());
+ }
+ }
+ }
+ return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes);
+ }
+
@Override
public void readFrom(StreamInput in) throws IOException {
term = in.readString();
@@ -125,8 +199,11 @@ public void readFrom(StreamInput in) throws IOException {
positionLength = 1;
}
}
+ else {
+ positionLength = 1;
+ }
type = in.readOptionalString();
- attributes = (Map) in.readGenericValue();
+ attributes = in.readMap();
}
@Override
@@ -139,7 +216,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
}
out.writeOptionalString(type);
- out.writeGenericValue(attributes);
+ out.writeMapWithConsistentOrder(attributes);
}
}
@@ -188,6 +265,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
+ private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response",
+ true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1]));
+ static {
+ PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AnalyzeToken.fromXContent(p), new ParseField(Fields.TOKENS));
+ PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(Fields.DETAIL));
+ }
+
+ public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
@@ -196,6 +284,9 @@ public void readFrom(StreamInput in) throws IOException {
for (int i = 0; i < size; i++) {
tokens.add(AnalyzeToken.readAnalyzeToken(in));
}
+ if (tokens.size() == 0) {
+ tokens = null;
+ }
detail = in.readOptionalStreamable(DetailAnalyzeResponse::new);
}
@@ -213,6 +304,25 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalStreamable(detail);
}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AnalyzeResponse that = (AnalyzeResponse) o;
+ return Objects.equals(detail, that.detail) &&
+ Objects.equals(tokens, that.tokens);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(detail, tokens);
+ }
+
+ @Override
+ public String toString() {
+ return Strings.toString(this, true, true);
+ }
+
static final class Fields {
static final String TOKENS = "tokens";
static final String TOKEN = "token";
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java
index c080a01a98168..1e0c4ed525ef1 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java
@@ -20,20 +20,27 @@
package org.elasticsearch.action.admin.indices.analyze;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
+import java.lang.reflect.Array;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
-public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
- DetailAnalyzeResponse() {
- }
+public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
private boolean customAnalyzer = false;
private AnalyzeTokenList analyzer;
@@ -41,6 +48,9 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
private AnalyzeTokenList tokenizer;
private AnalyzeTokenList[] tokenfilters;
+ DetailAnalyzeResponse() {
+ }
+
public DetailAnalyzeResponse(AnalyzeTokenList analyzer) {
this(false, analyzer, null, null, null);
}
@@ -66,6 +76,7 @@ public AnalyzeTokenList analyzer() {
}
public DetailAnalyzeResponse analyzer(AnalyzeTokenList analyzer) {
+ this.customAnalyzer = false;
this.analyzer = analyzer;
return this;
}
@@ -75,6 +86,7 @@ public CharFilteredText[] charfilters() {
}
public DetailAnalyzeResponse charfilters(CharFilteredText[] charfilters) {
+ this.customAnalyzer = true;
this.charfilters = charfilters;
return this;
}
@@ -84,6 +96,7 @@ public AnalyzeTokenList tokenizer() {
}
public DetailAnalyzeResponse tokenizer(AnalyzeTokenList tokenizer) {
+ this.customAnalyzer = true;
this.tokenizer = tokenizer;
return this;
}
@@ -93,10 +106,31 @@ public AnalyzeTokenList[] tokenfilters() {
}
public DetailAnalyzeResponse tokenfilters(AnalyzeTokenList[] tokenfilters) {
+ this.customAnalyzer = true; // token filter details only exist for a custom analysis chain
 this.tokenfilters = tokenfilters;
 return this;
 }
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
+ return customAnalyzer == that.customAnalyzer &&
+ Objects.equals(analyzer, that.analyzer) &&
+ Arrays.equals(charfilters, that.charfilters) && // element-wise array comparison, not reference equality
+ Objects.equals(tokenizer, that.tokenizer) &&
+ Arrays.equals(tokenfilters, that.tokenfilters);
+ }
+
+ @Override
+ public int hashCode() {
+ int result = Objects.hash(customAnalyzer, analyzer, tokenizer); // arrays hashed separately below so hashCode stays consistent with equals
+ result = 31 * result + Arrays.hashCode(charfilters);
+ result = 31 * result + Arrays.hashCode(tokenfilters);
+ return result;
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer);
@@ -131,6 +165,32 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
+    @SuppressWarnings("unchecked") // Array.newInstance returns Object; the cast is safe because clazz is Class<T>
+    private static <T> T[] fromList(Class<T> clazz, List<T> list) {
+        if (list == null) {
+            return null; // optional array fields stay null when absent from the response
+        }
+        return list.toArray((T[]) Array.newInstance(clazz, 0));
+    }
+
+    static final ConstructingObjectParser<DetailAnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("detail",
+        true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1],
+            fromList(CharFilteredText.class, (List<CharFilteredText>) args[2]),
+            (AnalyzeTokenList) args[3],
+            fromList(AnalyzeTokenList.class, (List<AnalyzeTokenList>) args[4])));
+
+    static {
+        PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER));
+        PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER));
+        PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS));
+        PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER));
+        PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS));
+    }
+
+    public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
static final class Fields {
static final String NAME = "name";
static final String FILTERED_TEXT = "filtered_text";
@@ -195,6 +255,22 @@ public static class AnalyzeTokenList implements Streamable, ToXContentObject {
private String name;
private AnalyzeResponse.AnalyzeToken[] tokens;
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AnalyzeTokenList that = (AnalyzeTokenList) o;
+ return Objects.equals(name, that.name) &&
+ Arrays.equals(tokens, that.tokens); // element-wise comparison of the token array
+ }
+
+ @Override
+ public int hashCode() {
+ int result = Objects.hash(name);
+ result = 31 * result + Arrays.hashCode(tokens); // array content hash, consistent with equals above
+ return result;
+ }
+
AnalyzeTokenList() {
}
@@ -235,6 +311,20 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
+        private static final ConstructingObjectParser<AnalyzeTokenList, Void> PARSER = new ConstructingObjectParser<>("token_list",
+            true, args -> new AnalyzeTokenList((String) args[0],
+                fromList(AnalyzeResponse.AnalyzeToken.class, (List<AnalyzeResponse.AnalyzeToken>) args[1])));
+
+        static {
+            PARSER.declareString(constructorArg(), new ParseField(Fields.NAME));
+            PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p),
+                new ParseField(AnalyzeResponse.Fields.TOKENS));
+        }
+
+        public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException {
+            return PARSER.parse(parser, null);
+        }
+
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
@@ -264,6 +354,7 @@ public void writeTo(StreamOutput out) throws IOException {
public static class CharFilteredText implements Streamable, ToXContentObject {
private String name;
private String[] texts;
+
CharFilteredText() {
}
@@ -293,6 +384,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
+        private static final ConstructingObjectParser<CharFilteredText, Void> PARSER = new ConstructingObjectParser<>("char_filtered_text",
+            true, args -> new CharFilteredText((String) args[0], ((List<String>) args[1]).toArray(new String[0])));
+
+        static {
+            PARSER.declareString(constructorArg(), new ParseField(Fields.NAME));
+            PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT));
+        }
+
+        public static CharFilteredText fromXContent(XContentParser parser) throws IOException {
+            return PARSER.parse(parser, null);
+        }
+
public static CharFilteredText readCharFilteredText(StreamInput in) throws IOException {
CharFilteredText text = new CharFilteredText();
text.readFrom(in);
@@ -310,5 +413,21 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeStringArray(texts);
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CharFilteredText that = (CharFilteredText) o;
+ return Objects.equals(name, that.name) &&
+ Arrays.equals(texts, that.texts); // element-wise comparison of the filtered-text array
+ }
+
+ @Override
+ public int hashCode() {
+ int result = Objects.hash(name);
+ result = 31 * result + Arrays.hashCode(texts); // array content hash, consistent with equals above
+ return result;
+ }
}
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java
new file mode 100644
index 0000000000000..404db74a46e12
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.indices.analyze;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<AnalyzeResponse> {
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return s -> s.contains("tokens."); // token entries carry arbitrary extra attributes, so random fields there are expected
+    }
+
+    @Override
+    protected AnalyzeResponse doParseInstance(XContentParser parser) throws IOException {
+        return AnalyzeResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected AnalyzeResponse createBlankInstance() {
+        return new AnalyzeResponse();
+    }
+
+    @Override
+    protected AnalyzeResponse createTestInstance() {
+        int tokenCount = randomIntBetween(1, 30);
+        AnalyzeResponse.AnalyzeToken[] tokens = new AnalyzeResponse.AnalyzeToken[tokenCount];
+        for (int i = 0; i < tokenCount; i++) {
+            tokens[i] = randomToken();
+        }
+        DetailAnalyzeResponse dar = null;
+        if (randomBoolean()) {
+            dar = new DetailAnalyzeResponse();
+            if (randomBoolean()) {
+                dar.charfilters(new DetailAnalyzeResponse.CharFilteredText[]{
+                    new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"})
+                });
+            }
+            dar.tokenizer(new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens));
+            if (randomBoolean()) {
+                dar.tokenfilters(new DetailAnalyzeResponse.AnalyzeTokenList[]{
+                    new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens),
+                    new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens)
+                });
+            }
+            return new AnalyzeResponse(null, dar);
+        }
+        return new AnalyzeResponse(Arrays.asList(tokens), null);
+    }
+
+    private AnalyzeResponse.AnalyzeToken randomToken() {
+        String token = randomAlphaOfLengthBetween(1, 20);
+        int position = randomIntBetween(0, 1000);
+        int startOffset = randomIntBetween(0, 1000);
+        int endOffset = randomIntBetween(0, 1000);
+        int posLength = randomIntBetween(1, 5);
+        String type = randomAlphaOfLengthBetween(1, 20);
+        Map<String, Object> extras = new HashMap<>();
+        if (randomBoolean()) {
+            int entryCount = randomInt(6);
+            for (int i = 0; i < entryCount; i++) {
+                switch (randomInt(6)) {
+                    case 0:
+                    case 1:
+                    case 2:
+                    case 3:
+                        String key = randomAlphaOfLength(5);
+                        String value = randomAlphaOfLength(10);
+                        extras.put(key, value);
+                        break;
+                    case 4:
+                        String objkey = randomAlphaOfLength(5);
+                        Map<String, String> obj = new HashMap<>();
+                        obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
+                        extras.put(objkey, obj);
+                        break;
+                    case 5:
+                        String listkey = randomAlphaOfLength(5);
+                        List<String> list = new ArrayList<>();
+                        list.add(randomAlphaOfLength(4));
+                        list.add(randomAlphaOfLength(6));
+                        extras.put(listkey, list);
+                        break;
+                }
+            }
+        }
+        return new AnalyzeResponse.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras);
+    }
+}