Mapping: Return _boost and _analyzer in the GET field mapping API #7589

Closed · wants to merge 3 commits
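For context, here is a minimal usage sketch (not part of the change set) of what this PR is meant to enable: retrieving the _boost and _analyzer fields of a document through the Java client's GET API, mirroring the new GetActionTests case further down. The index, type, and id names are assumptions.

import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;

public class GetBoostAndAnalyzerSketch {
    // Requests _boost and _analyzer like any other field of a single document;
    // this change is intended to make them retrievable this way.
    public static void printBoostAndAnalyzer(Client client) {
        GetResponse response = client.prepareGet("test", "doc", "1")
                .setFields("_boost", "_analyzer")
                .get();
        System.out.println("_boost: " + response.getField("_boost").getValue());
        System.out.println("_analyzer: " + response.getField("_analyzer").getValue());
    }
}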
src/main/java/org/elasticsearch/index/mapper/internal/AnalyzerMapper.java

@@ -20,36 +20,52 @@
package org.elasticsearch.index.mapper.internal;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.search.highlight.HighlighterContext;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.mapper.MapperBuilders.analyzer;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;

/**
*
*/
public class AnalyzerMapper implements Mapper, InternalMapper, RootMapper {
public class AnalyzerMapper extends AbstractFieldMapper<String> implements InternalMapper, RootMapper {

public static final String NAME = "_analyzer";
public static final String CONTENT_TYPE = "_analyzer";

public static class Defaults {
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String PATH = "_analyzer";
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
}

public static class Builder extends Mapper.Builder<Builder, AnalyzerMapper> {
@Override
public String value(Object value) {
return (String) value;
}

public static class Builder extends AbstractFieldMapper.Builder<Builder, AnalyzerMapper> {

private String field = Defaults.PATH;

public Builder() {
super(CONTENT_TYPE);
super(CONTENT_TYPE, new FieldType(Defaults.FIELD_TYPE));
this.builder = this;
}

@@ -60,14 +76,15 @@ public Builder field(String field) {

@Override
public AnalyzerMapper build(BuilderContext context) {
return new AnalyzerMapper(field);
return new AnalyzerMapper(field, fieldType);
}
}

public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
AnalyzerMapper.Builder builder = analyzer();
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
@@ -82,16 +99,28 @@ public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext
private final String path;

public AnalyzerMapper() {
this(Defaults.PATH);
this(Defaults.PATH, Defaults.FIELD_TYPE);
}

protected AnalyzerMapper(String path, FieldType fieldType) {
this(path, Defaults.BOOST, fieldType, null, null, null, null);
}

public AnalyzerMapper(String path) {
public AnalyzerMapper(String path, float boost, FieldType fieldType, PostingsFormatProvider postingsProvider,
DocValuesFormatProvider docValuesProvider, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(path, path, NAME, NAME), boost, fieldType, null, Lucene.KEYWORD_ANALYZER,
Lucene.KEYWORD_ANALYZER, postingsProvider, docValuesProvider, null, null, fieldDataSettings, indexSettings);
this.path = path.intern();
}

@Override
public String name() {
return CONTENT_TYPE;
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}

@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("string");
}

@Override
@@ -100,34 +129,15 @@ public void preParse(ParseContext context) throws IOException {

@Override
public void postParse(ParseContext context) throws IOException {
Analyzer analyzer = context.docMapper().mappers().indexAnalyzer();
if (path != null) {
String value = null;
List<IndexableField> fields = context.doc().getFields();
for (int i = 0, fieldsSize = fields.size(); i < fieldsSize; i++) {
IndexableField field = fields.get(i);
if (field.name().equals(path)) {
value = field.stringValue();
break;
}
}
if (value == null) {
value = context.ignoredValue(path);
}
if (value != null) {
analyzer = context.analysisService().analyzer(value);
if (analyzer == null) {
throw new MapperParsingException("No analyzer found for [" + value + "] from path [" + path + "]");
}
analyzer = context.docMapper().mappers().indexAnalyzer(analyzer);
}
if (context.analyzer() == null) {
Analyzer analyzer = context.docMapper().mappers().indexAnalyzer();
context.analyzer(analyzer);
}
context.analyzer(analyzer);
}

@Override
public boolean includeInObject() {
return false;
return true;
}

public Analyzer setAnalyzer(HighlighterContext context){
@@ -151,36 +161,50 @@ public Analyzer setAnalyzer(HighlighterContext context){
}

@Override
public void parse(ParseContext context) throws IOException {
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
String value = context.parser().textOrNull();
if (fieldType().indexed()) {
fields.add(new StringField(context.parser().currentName(), value, Field.Store.NO));
} else {
context.ignoredValue(context.parser().currentName(), value);
}
Analyzer analyzer = context.docMapper().mappers().indexAnalyzer();
if (value != null) {
analyzer = context.analysisService().analyzer(value);
if (analyzer == null) {
throw new MapperParsingException("No analyzer found for [" + value + "] from path [" + path + "]");
}
analyzer = context.docMapper().mappers().indexAnalyzer(analyzer);
}
context.analyzer(analyzer);
}

@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
}

@Override
public void traverse(FieldMapperListener fieldMapperListener) {
}

@Override
public void traverse(ObjectMapperListener objectMapperListener) {
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (path.equals(Defaults.PATH)) {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
if (path.equals(Defaults.PATH) && fieldType.indexed() == Defaults.FIELD_TYPE.indexed() &&
fieldType.stored() == Defaults.FIELD_TYPE.stored() && !includeDefaults) {
return builder;
}
builder.startObject(CONTENT_TYPE);
if (!path.equals(Defaults.PATH)) {
if (includeDefaults || !path.equals(Defaults.PATH)) {
builder.field("path", path);
}
if (includeDefaults || !(fieldType.indexed() == Defaults.FIELD_TYPE.indexed() &&
fieldType.stored() == Defaults.FIELD_TYPE.stored())) {
builder.field("index", indexTokenizeOptionToString(fieldType.indexed(), fieldType.tokenized()));
}
builder.endObject();
return builder;
}

@Override
public void close() {

protected String contentType() {
return CONTENT_TYPE;
}

}
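Since the TypeParser above now runs parseField() and toXContent() serializes the index setting, the _analyzer mapping should be able to carry standard field options alongside path. Below is a minimal sketch of building such a mapping with XContentBuilder, under that assumption; the type and field names are made up.

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class AnalyzerMappingSketch {
    // _analyzer resolves the per-document analyzer from the "field_analyzer"
    // field and is itself indexed (not analyzed), so it can be returned
    // like any other field.
    public static XContentBuilder analyzerMapping() throws Exception {
        return XContentFactory.jsonBuilder().startObject()
                .startObject("type")
                    .startObject("_analyzer")
                        .field("path", "field_analyzer")
                        .field("index", "not_analyzed")
                    .endObject()
                .endObject()
            .endObject();
    }
}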
src/main/java/org/elasticsearch/index/mapper/internal/BoostFieldMapper.java

@@ -89,7 +89,7 @@ public Builder nullValue(float nullValue) {

@Override
public BoostFieldMapper build(BuilderContext context) {
return new BoostFieldMapper(name, buildIndexName(context),
return new BoostFieldMapper(name,
fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings());
}
}
@@ -114,17 +114,13 @@ public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext
private final Float nullValue;

public BoostFieldMapper() {
this(Defaults.NAME, Defaults.NAME);
}

protected BoostFieldMapper(String name, String indexName) {
this(name, indexName, Defaults.PRECISION_STEP_32_BIT, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
this(Defaults.NAME, Defaults.PRECISION_STEP_32_BIT, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
Defaults.NULL_VALUE, null, null, null, ImmutableSettings.EMPTY);
}

protected BoostFieldMapper(String name, String indexName, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Float nullValue,
protected BoostFieldMapper(String name, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Float nullValue,
PostingsFormatProvider postingsProvider, DocValuesFormatProvider docValuesProvider, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), precisionStep, boost, fieldType, docValues, Defaults.IGNORE_MALFORMED, Defaults.COERCE,
super(new Names(name, name, Defaults.NAME, Defaults.NAME), precisionStep, boost, fieldType, docValues, Defaults.IGNORE_MALFORMED, Defaults.COERCE,
NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep), NumericFloatAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
postingsProvider, docValuesProvider, null, null, fieldDataSettings, indexSettings, MultiFields.empty(), null);
this.nullValue = nullValue;
@@ -240,24 +236,18 @@ public boolean includeInObject() {
return true;
}

@Override
public void parse(ParseContext context) throws IOException {
// we override parse since we want to handle cases where it is not indexed and not stored (the default)
float value = parseFloatValue(context);
if (!Float.isNaN(value)) {
context.docBoost(value);
}
super.parse(context);
}

@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
final float value = parseFloatValue(context);
if (Float.isNaN(value)) {
return;
}
if (fieldType().indexed() || fieldType().stored()) {
fields.add(new FloatFieldMapper.CustomFloatNumericField(this, value, fieldType));
} else {
context.ignoredValue(context.parser().currentName(), context.parser().textOrNull());
}
context.docBoost(value);
fields.add(new FloatFieldMapper.CustomFloatNumericField(this, value, fieldType));
}

private float parseFloatValue(ParseContext context) throws IOException {
src/test/java/org/elasticsearch/get/GetActionTests.java (36 additions, 0 deletions)

@@ -32,6 +32,7 @@
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
@@ -1020,6 +1021,41 @@ public void testUngeneratedFieldsPartOfSourceUnstoredSourceDisabled() throws IOException {
assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
}

@Test
public void testBoostAnalyzerFieldDefaultPath() throws IOException {
boolean stored = randomBoolean();
indexSingleDocumentWithBoostAndAnalyzer(stored);
String[] fieldsList = {"_boost", "_analyzer"};
// before refresh - the document is only in the translog
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
refresh();
// after refresh - the document is in the translog and also indexed
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
flush();
// after flush - the document is no longer in the translog, only indexed
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
}

void indexSingleDocumentWithBoostAndAnalyzer(boolean stored) throws IOException {
XContentBuilder createIndexSource = jsonBuilder().startObject()
.startObject("settings")
.field("index.translog.disable_flush", true)
.field("refresh_interval", -1)
.endObject()
.startObject("mappings")
.startObject("doc")
.startObject("_boost")
.field("null_nalue",1).field("store", stored)
.endObject()
.endObject()
.endObject()
.endObject();
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
ensureGreen();
XContentBuilder doc = jsonBuilder().startObject().field("_boost", 5.0).field("_analyzer", "whitespace").endObject();
client().prepareIndex("test", "doc").setId("1").setSource(doc).get();
}

@Test
public void testUngeneratedFieldsPartOfSourceEitherStoredOrSourceEnabled() throws IOException {
boolean stored = randomBoolean();
src/test/java/org/elasticsearch/index/mapper/analyzer/AnalyzerMapperIntegrationTests.java (new file, 65 additions)

@@ -0,0 +1,65 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.mapper.analyzer;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

/**
*
*/
public class AnalyzerMapperIntegrationTests extends ElasticsearchIntegrationTest {

@Test
public void testAnalyzerMappingAppliedToDocs() throws Exception {

String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_analyzer").field("path", "field_analyzer").endObject()
.startObject("properties")
.startObject("text").field("type", "string").endObject()
.endObject()
.endObject().endObject().string();
prepareCreate("test").addMapping("type", mapping).get();
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("text", "foo bar").field("field_analyzer", "keyword").endObject();
client().prepareIndex("test", "type").setSource(doc).get();
client().admin().indices().prepareRefresh("test").get();
SearchResponse response = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("text", "foo bar")).get();
assertThat(response.getHits().totalHits(), equalTo(1L));

response = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field_analyzer", "keyword")).get();
assertThat(response.getHits().totalHits(), equalTo(1L));
}


}