diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java
index 43680db75678d..ad904c6b29107 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java
@@ -57,7 +57,6 @@
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
 import org.elasticsearch.Version;
-import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.AutomatonQueries;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
@@ -80,7 +79,6 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -92,6 +90,7 @@ public class TextFieldMapper extends FieldMapper {

     public static final String CONTENT_TYPE = "text";
     private static final String FAST_PHRASE_SUFFIX = "._index_phrase";
+    private static final String FAST_PREFIX_SUFFIX = "._index_prefix";

     public static class Defaults {
         public static final double FIELDDATA_MIN_FREQUENCY = 0;
@@ -332,7 +331,7 @@ private TextFieldType buildFieldType(FieldType fieldType, ContentPath contentPat
             return ft;
         }

-        private PrefixFieldMapper buildPrefixMapper(ContentPath contentPath, FieldType fieldType, TextFieldType tft) {
+        private SubFieldInfo buildPrefixInfo(ContentPath contentPath, FieldType fieldType, TextFieldType tft) {
             if (indexPrefixes.get() == null) {
                 return null;
             }
@@ -360,16 +359,15 @@ private PrefixFieldMapper buildPrefixMapper(ContentPath contentPath, FieldType f
             if (fieldType.storeTermVectorOffsets()) {
                 pft.setStoreTermVectorOffsets(true);
             }
-            PrefixFieldType prefixFieldType = new PrefixFieldType(tft, fullName + "._index_prefix", indexPrefixes.get());
-            tft.setPrefixFieldType(prefixFieldType);
-            return new PrefixFieldMapper(pft, prefixFieldType, new PrefixWrappedAnalyzer(
+            tft.setIndexPrefixes(indexPrefixes.get().minChars, indexPrefixes.get().maxChars);
+            return new SubFieldInfo(fullName + "._index_prefix", pft, new PrefixWrappedAnalyzer(
                 analyzers.getIndexAnalyzer().analyzer(),
                 analyzers.positionIncrementGap.get(),
-                prefixFieldType.minChars,
-                prefixFieldType.maxChars));
+                indexPrefixes.get().minChars,
+                indexPrefixes.get().maxChars));
         }

-        private PhraseFieldMapper buildPhraseMapper(FieldType fieldType, TextFieldType parent) {
+        private SubFieldInfo buildPhraseInfo(FieldType fieldType, TextFieldType parent) {
             if (indexPhrases.get() == false) {
                 return null;
             }
@@ -383,24 +381,24 @@ private PhraseFieldMapper buildPhraseMapper(FieldType fieldType, TextFieldType p
             parent.setIndexPhrases();
             PhraseWrappedAnalyzer a = new PhraseWrappedAnalyzer(analyzers.getIndexAnalyzer().analyzer(),
                 analyzers.positionIncrementGap.get());
-            return new PhraseFieldMapper(phraseFieldType, new PhraseFieldType(parent), a);
+            return new SubFieldInfo(parent.name() + FAST_PHRASE_SUFFIX, phraseFieldType, a);
         }

         public Map<String, NamedAnalyzer> indexAnalyzers(String name,
-                                                         PhraseFieldMapper phraseFieldMapper,
-                                                         PrefixFieldMapper prefixFieldMapper) {
+                                                         SubFieldInfo phraseFieldInfo,
+                                                         SubFieldInfo prefixFieldInfo) {
             Map<String, NamedAnalyzer> analyzers = new HashMap<>();
             NamedAnalyzer main = this.analyzers.getIndexAnalyzer();
             analyzers.put(name, main);
-            if (phraseFieldMapper != null) {
+            if (phraseFieldInfo != null) {
                 analyzers.put(
-                    phraseFieldMapper.name(),
-                    new NamedAnalyzer(main.name() + "_phrase", AnalyzerScope.INDEX, phraseFieldMapper.analyzer));
+                    phraseFieldInfo.field,
+                    new NamedAnalyzer(main.name() + "_phrase", AnalyzerScope.INDEX, phraseFieldInfo.analyzer));
             }
-            if (prefixFieldMapper != null) {
+            if (prefixFieldInfo != null) {
                 analyzers.put(
-                    prefixFieldMapper.name(),
-                    new NamedAnalyzer(main.name() + "_prefix", AnalyzerScope.INDEX, prefixFieldMapper.analyzer));
+                    prefixFieldInfo.field,
+                    new NamedAnalyzer(main.name() + "_prefix", AnalyzerScope.INDEX, prefixFieldInfo.analyzer));
             }
             return analyzers;
         }
@@ -409,12 +407,18 @@ public Map<String, NamedAnalyzer> indexAnalyzers(String name,
         public TextFieldMapper build(ContentPath contentPath) {
             FieldType fieldType = TextParams.buildFieldType(index, store, indexOptions, norms, termVectors);
             TextFieldType tft = buildFieldType(fieldType, contentPath);
-            PhraseFieldMapper phraseFieldMapper = buildPhraseMapper(fieldType, tft);
-            PrefixFieldMapper prefixFieldMapper = buildPrefixMapper(contentPath, fieldType, tft);
+            SubFieldInfo phraseFieldInfo = buildPhraseInfo(fieldType, tft);
+            SubFieldInfo prefixFieldInfo = buildPrefixInfo(contentPath, fieldType, tft);
+            MultiFields multiFields = multiFieldsBuilder.build(this, contentPath);
+            for (Mapper mapper : multiFields) {
+                if (mapper.name().endsWith(FAST_PHRASE_SUFFIX) || mapper.name().endsWith(FAST_PREFIX_SUFFIX)) {
+                    throw new MapperParsingException("Cannot use reserved field name [" + mapper.name() + "]");
+                }
+            }
             return new TextFieldMapper(name, fieldType, tft,
-                indexAnalyzers(tft.name(), phraseFieldMapper, prefixFieldMapper),
-                prefixFieldMapper, phraseFieldMapper,
-                multiFieldsBuilder.build(this, contentPath), copyTo.build(), this);
+                indexAnalyzers(tft.name(), phraseFieldInfo, prefixFieldInfo),
+                prefixFieldInfo, phraseFieldInfo,
+                multiFields, copyTo.build(), this);
         }
     }

@@ -480,45 +484,14 @@ protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComp
         }
     }

-    static final class PhraseFieldType extends StringFieldType {
-
-        final TextFieldType parent;
-
-        PhraseFieldType(TextFieldType parent) {
-            super(parent.name() + FAST_PHRASE_SUFFIX, true, false, false, parent.getTextSearchInfo(), Collections.emptyMap());
-            this.parent = parent;
-        }
-
-        @Override
-        public String typeName() {
-            return "phrase";
-        }
-
-        @Override
-        public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
-            // Because this internal field is modelled as a multi-field, SourceValueFetcher will look up its
-            // parent field in _source. So we don't need to use the parent field name here.
-            return SourceValueFetcher.toString(name(), context, format);
-        }
-
-        @Override
-        public Query existsQuery(QueryShardContext context) {
-            throw new UnsupportedOperationException();
-        }
-    }
-
-    static final class PrefixFieldType extends StringFieldType {
+    private static final class PrefixFieldType extends StringFieldType {

         final int minChars;
         final int maxChars;
         final TextFieldType parentField;

-        PrefixFieldType(TextFieldType parentField, String name, PrefixConfig config) {
-            this(parentField, name, config.minChars, config.maxChars);
-        }
-
-        PrefixFieldType(TextFieldType parentField, String name, int minChars, int maxChars) {
-            super(name, true, false, false, parentField.getTextSearchInfo(), Collections.emptyMap());
+        PrefixFieldType(TextFieldType parentField, int minChars, int maxChars) {
+            super(parentField.name() + FAST_PREFIX_SUFFIX, true, false, false, parentField.getTextSearchInfo(), Collections.emptyMap());
             this.minChars = minChars;
             this.maxChars = maxChars;
             this.parentField = parentField;
@@ -526,9 +499,7 @@ static final class PrefixFieldType extends StringFieldType {

         @Override
         public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) {
-            // Because this internal field is modelled as a multi-field, SourceValueFetcher will look up its
-            // parent field in _source. So we don't need to use the parent field name here.
-            return SourceValueFetcher.toString(name(), context, format);
+            throw new UnsupportedOperationException();
         }

         boolean accept(int length) {
@@ -596,67 +567,18 @@ public Query existsQuery(QueryShardContext context) {
         }
     }

-    private static final class PhraseFieldMapper extends FieldMapper {
+    private static final class SubFieldInfo {

         private final Analyzer analyzer;
         private final FieldType fieldType;
+        private final String field;

-        PhraseFieldMapper(FieldType fieldType, PhraseFieldType mappedFieldType, PhraseWrappedAnalyzer analyzer) {
-            super(mappedFieldType.name(), mappedFieldType, MultiFields.empty(), CopyTo.empty());
+        SubFieldInfo(String field, FieldType fieldType, Analyzer analyzer) {
             this.fieldType = fieldType;
             this.analyzer = analyzer;
+            this.field = field;
         }

-        @Override
-        protected void parseCreateField(ParseContext context) {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public Builder getMergeBuilder() {
-            return null;
-        }
-
-        @Override
-        protected String contentType() {
-            return "phrase";
-        }
-    }
-
-    private static final class PrefixFieldMapper extends FieldMapper {
-
-        private final Analyzer analyzer;
-        private final FieldType fieldType;
-
-        protected PrefixFieldMapper(FieldType fieldType, PrefixFieldType mappedFieldType, Analyzer analyzer) {
-            super(mappedFieldType.name(), mappedFieldType, MultiFields.empty(), CopyTo.empty());
-            this.analyzer = analyzer;
-            this.fieldType = fieldType;
-        }
-
-        void addField(ParseContext context, String value) {
-            context.doc().add(new Field(fieldType().name(), value, fieldType));
-        }
-
-        @Override
-        protected void parseCreateField(ParseContext context) {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public Builder getMergeBuilder() {
-            return null;
-        }
-
-        @Override
-        protected String contentType() {
-            return "prefix";
-        }
-
-        @Override
-        public String toString() {
-            return fieldType().toString();
-        }
     }

     public static class TextFieldType extends StringFieldType {
@@ -708,8 +630,8 @@ int fielddataMinSegmentSize() {
             return filter.minSegmentSize;
         }

-        void setPrefixFieldType(PrefixFieldType prefixFieldType) {
-            this.prefixFieldType = prefixFieldType;
+        void setIndexPrefixes(int minChars, int maxChars) {
+            this.prefixFieldType = new PrefixFieldType(this, minChars, maxChars);
         }

         void setIndexPhrases() {
@@ -868,14 +790,14 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S

     private final Builder builder;
     private final FieldType fieldType;
-    private final PrefixFieldMapper prefixFieldMapper;
-    private final PhraseFieldMapper phraseFieldMapper;
+    private final SubFieldInfo prefixFieldInfo;
+    private final SubFieldInfo phraseFieldInfo;

     protected TextFieldMapper(String simpleName, FieldType fieldType,
                               TextFieldType mappedFieldType,
                               Map<String, NamedAnalyzer> indexAnalyzers,
-                              PrefixFieldMapper prefixFieldMapper,
-                              PhraseFieldMapper phraseFieldMapper,
+                              SubFieldInfo prefixFieldInfo,
+                              SubFieldInfo phraseFieldInfo,
                               MultiFields multiFields, CopyTo copyTo, Builder builder) {
         super(simpleName, mappedFieldType, indexAnalyzers, multiFields, copyTo);
         assert mappedFieldType.getTextSearchInfo().isTokenized();
@@ -884,8 +806,8 @@ protected TextFieldMapper(String simpleName, FieldType fieldType,
             throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + name() + "]");
         }
         this.fieldType = fieldType;
-        this.prefixFieldMapper = prefixFieldMapper;
-        this.phraseFieldMapper = phraseFieldMapper;
+        this.prefixFieldInfo = prefixFieldInfo;
+        this.phraseFieldInfo = phraseFieldInfo;
         this.builder = builder;
     }

@@ -913,30 +835,15 @@ protected void parseCreateField(ParseContext context) throws IOException {
             if (fieldType.omitNorms()) {
                 createFieldNamesField(context);
             }
-            if (prefixFieldMapper != null) {
-                prefixFieldMapper.addField(context, value);
+            if (prefixFieldInfo != null) {
+                context.doc().add(new Field(prefixFieldInfo.field, value, prefixFieldInfo.fieldType));
             }
-            if (phraseFieldMapper != null) {
-                context.doc().add(new Field(phraseFieldMapper.fieldType().name(), value, phraseFieldMapper.fieldType));
+            if (phraseFieldInfo != null) {
+                context.doc().add(new Field(phraseFieldInfo.field, value, phraseFieldInfo.fieldType));
             }
         }
     }

-    @Override
-    public Iterator<Mapper> iterator() {
-        List<Mapper> subIterators = new ArrayList<>();
-        if (prefixFieldMapper != null) {
-            subIterators.add(prefixFieldMapper);
-        }
-        if (phraseFieldMapper != null) {
-            subIterators.add(phraseFieldMapper);
-        }
-        if (subIterators.size() == 0) {
-            return super.iterator();
-        }
-        return Iterators.concat(super.iterator(), subIterators.iterator());
-    }
-
     @Override
     protected String contentType() {
         return CONTENT_TYPE;
@@ -1020,10 +927,11 @@ public static Query createPhrasePrefixQuery(TokenStream stream, String field, in
         }

         if (terms.length == 1) {
-            Term[] newTerms = Arrays.stream(terms[0])
+            SynonymQuery.Builder sb = new SynonymQuery.Builder(prefixField);
+            Arrays.stream(terms[0])
                 .map(term -> new Term(prefixField, term.bytes()))
-                .toArray(Term[]::new);
-            return new SynonymQuery(newTerms);
+                .forEach(sb::addTerm);
+            return sb.build();
         }

         SpanNearQuery.Builder spanQuery = new SpanNearQuery.Builder(field, true);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
index 891c1aa2f7fc1..ada1a3a25743b 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
@@ -37,9 +37,12 @@
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.queryparser.classic.ParseException;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.MultiPhraseQuery;
+import org.apache.lucene.search.NormsFieldExistsQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SynonymQuery;
@@ -49,6 +52,7 @@
 import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -67,6 +71,7 @@
 import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.search.MatchQuery;
+import org.elasticsearch.index.search.QueryStringQueryParser;

 import java.io.IOException;
 import java.util.Arrays;
@@ -78,6 +83,7 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.core.Is.is;
+import static org.mockito.Mockito.when;

 public class TextFieldMapperTests extends MapperTestCase {

@@ -573,8 +579,6 @@ public void testIndexPrefixIndexTypes() throws IOException {
                     .field("index_options", "offsets")
                 )
             );
-            FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
-            assertEquals(prefix.name(), "field._index_prefix");
             ParsedDocument doc = mapper.parse(source(b -> b.field("field", "some text")));
             IndexableField field = doc.rootDoc().getField("field._index_prefix");
             assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, field.fieldType().indexOptions());
@@ -590,10 +594,8 @@ public void testIndexPrefixIndexTypes() throws IOException {
                     .field("index_options", "freqs")
                 )
             );
-            FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
             ParsedDocument doc = mapper.parse(source(b -> b.field("field", "some text")));
             IndexableField field = doc.rootDoc().getField("field._index_prefix");
-            assertEquals(prefix.name(), "field._index_prefix");
             assertEquals(IndexOptions.DOCS, field.fieldType().indexOptions());
             assertFalse(field.fieldType().storeTermVectors());
         }
@@ -608,10 +610,8 @@ public void testIndexPrefixIndexTypes() throws IOException {
                     .field("index_options", "positions")
                 )
             );
-            FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
             ParsedDocument doc = mapper.parse(source(b -> b.field("field", "some text")));
             IndexableField field = doc.rootDoc().getField("field._index_prefix");
-            assertEquals(prefix.name(), "field._index_prefix");
             assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, field.fieldType().indexOptions());
             assertFalse(field.fieldType().storeTermVectors());
         }
@@ -626,10 +626,8 @@ public void testIndexPrefixIndexTypes() throws IOException {
                     .field("term_vector", "with_positions_offsets")
                 )
             );
-            FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
             ParsedDocument doc = mapper.parse(source(b -> b.field("field", "some text")));
             IndexableField field = doc.rootDoc().getField("field._index_prefix");
-            assertEquals(prefix.name(), "field._index_prefix");
             assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, field.fieldType().indexOptions());
             assertTrue(field.fieldType().storeTermVectorOffsets());
         }
@@ -644,10 +642,8 @@ public void testIndexPrefixIndexTypes() throws IOException {
                     .field("term_vector", "with_positions")
                 )
             );
-            FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
             ParsedDocument doc = mapper.parse(source(b -> b.field("field", "some text")));
             IndexableField field = doc.rootDoc().getField("field._index_prefix");
-            assertEquals(prefix.name(), "field._index_prefix");
             assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, field.fieldType().indexOptions());
             assertFalse(field.fieldType().storeTermVectorOffsets());
         }
@@ -780,6 +776,46 @@ protected TokenStreamComponents createComponents(String fieldName) {
         assertThat(e.getMessage(), containsString("Cannot set index_phrases on field [field] if positions are not enabled"));
     }

+    public void testObjectExistsQuery() throws IOException, ParseException {
+        MapperService ms = createMapperService(mapping(b -> {
+            b.startObject("foo");
+            {
+                b.field("type", "object");
+                b.startObject("properties");
+                {
+                    b.startObject("bar");
+                    {
+                        b.field("type", "text");
+                        b.field("index_phrases", true);
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));
+        QueryShardContext qsc = createQueryShardContext(ms);
+        when(qsc.indexVersionCreated()).thenReturn(Version.CURRENT);
+        QueryStringQueryParser parser = new QueryStringQueryParser(qsc, "f");
+        Query q = parser.parse("foo:*");
+        assertEquals(new ConstantScoreQuery(new BooleanQuery.Builder()
+            .add(new NormsFieldExistsQuery("foo.bar"), BooleanClause.Occur.SHOULD)
+            .build()), q);
+    }
+
+    private static void assertAnalyzesTo(Analyzer analyzer, String field, String input, String[] output) throws IOException {
+        try (TokenStream ts = analyzer.tokenStream(field, input)) {
+            ts.reset();
+            CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
+            for (String t : output) {
+                assertTrue(ts.incrementToken());
+                assertEquals(t, termAtt.toString());
+            }
+            assertFalse(ts.incrementToken());
+            ts.end();
+        }
+    }
+
     public void testIndexPrefixMapping() throws IOException {

         {
@@ -789,33 +825,34 @@ public void testIndexPrefixMapping() throws IOException {
             MapperService ms = createMapperService(
                 fieldMapping(b -> b
                     .field("type", "text")
                     .field("analyzer", "standard")
                     .startObject("index_prefixes")
                     .field("min_chars", 2)
-                    .field("max_chars", 10)
+                    .field("max_chars", 6)
                     .endObject()
                 )
             );
-            assertThat(ms.documentMapper().mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=2:10"));
-
             ParsedDocument doc = ms.documentMapper().parse(source(b -> b.field("field", "Some English text that is going to be very useful")));
             IndexableField[] fields = doc.rootDoc().getFields("field._index_prefix");
             assertEquals(1, fields.length);
             withLuceneIndex(ms, iw -> iw.addDocument(doc.rootDoc()), ir -> {}); // check we can index
+
+            assertAnalyzesTo(ms.indexAnalyzer(), "field._index_prefix", "tweedledum",
+                new String[]{ "tw", "twe", "twee", "tweed", "tweedl" });
         }

         {
-            DocumentMapper mapper = createDocumentMapper(
+            MapperService ms = createMapperService(
                 fieldMapping(b -> b.field("type", "text").field("analyzer", "standard").startObject("index_prefixes").endObject())
             );
-            assertThat(mapper.mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=2:5"));
-
+            assertAnalyzesTo(ms.indexAnalyzer(), "field._index_prefix", "tweedledum",
+                new String[]{ "tw", "twe", "twee", "tweed" });
         }

         {
-            DocumentMapper mapper = createDocumentMapper(
+            MapperService ms = createMapperService(
                 fieldMapping(b -> b.field("type", "text").nullField("index_prefixes"))
             );
-            assertNull(mapper.mappers().getMapper("field._index_prefix"));
+            expectThrows(Exception.class, () -> ms.indexAnalyzer().tokenStream("field._index_prefixes", "test"));
         }

         {
@@ -824,7 +861,7 @@ public void testIndexPrefixMapping() throws IOException {
                 b.startObject("index_prefixes").field("min_chars", 1).field("max_chars", 10).endObject();
                 b.startObject("fields").startObject("_index_prefix").field("type", "text").endObject().endObject();
             })));
-            assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined more than once"));
+            assertThat(e.getMessage(), containsString("Cannot use reserved field name [field._index_prefix]"));
         }

         {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java
index 2b9b73b4a049c..781c4bb2afaee 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java
@@ -133,7 +133,7 @@ public void testFuzzyQuery() {

     public void testIndexPrefixes() {
         TextFieldType ft = createFieldType();
-        ft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType(ft, "field._index_prefix", 2, 10));
+        ft.setIndexPrefixes(2, 10);

         Query q = ft.prefixQuery("goin", CONSTANT_SCORE_REWRITE, false, randomMockShardContext());
         assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q);
@@ -164,19 +164,8 @@ public void testIndexPrefixes() {

     public void testFetchSourceValue() throws IOException {
         TextFieldType fieldType = createFieldType();
-
-        assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(fieldType, "value"));
-        assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(fieldType, 42L));
-        assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(fieldType, true));
-
-        TextFieldMapper.PrefixFieldType prefixFieldType = new TextFieldMapper.PrefixFieldType(fieldType, "field._index_prefix", 2, 10);
-        assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(prefixFieldType, "value"));
-        assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(prefixFieldType, 42L));
-        assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(prefixFieldType, true));
-
-        TextFieldMapper.PhraseFieldType phraseFieldType = new TextFieldMapper.PhraseFieldType(fieldType);
-        assertEquals(org.elasticsearch.common.collect.List.of("value"), fetchSourceValue(phraseFieldType, "value"));
-        assertEquals(org.elasticsearch.common.collect.List.of("42"), fetchSourceValue(phraseFieldType, 42L));
-        assertEquals(org.elasticsearch.common.collect.List.of("true"), fetchSourceValue(phraseFieldType, true));
+        assertEquals(Collections.singletonList("value"), fetchSourceValue(fieldType, "value"));
+        assertEquals(Collections.singletonList("42"), fetchSourceValue(fieldType, 42L));
+        assertEquals(Collections.singletonList("true"), fetchSourceValue(fieldType, true));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java
index c1d4fdf4a3e97..c93df3ad6013e 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java
@@ -383,8 +383,8 @@ public void testTextSubFields() throws IOException {
             .endObject();

         Map<String, DocumentField> fields = fetchFields(mapperService, source, "*");
-        assertThat(fields.size(), equalTo(3));
-        assertThat(fields.keySet(), containsInAnyOrder("field", "field._index_prefix", "field._index_phrase"));
+        assertThat(fields.size(), equalTo(1));
+        assertThat(fields.keySet(), containsInAnyOrder("field"));

         for (DocumentField field : fields.values()) {
             assertThat(field.getValues().size(), equalTo(1));
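
Note on the Lucene API swap in createPhrasePrefixQuery above: the removed array-based SynonymQuery construction is replaced by SynonymQuery.Builder, which is bound to a single field and accumulates terms before build(). The following is a minimal standalone sketch of that builder pattern, not code from the PR; the class name, field name and terms are invented for illustration.

// Illustrative sketch only: shows the SynonymQuery.Builder usage adopted in
// createPhrasePrefixQuery. Field name and terms below are made up.
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;

public class SynonymQueryBuilderSketch {
    public static void main(String[] args) {
        String prefixField = "field._index_prefix";

        // Before: new SynonymQuery(termsArray). After: bind a builder to one field,
        // add each rewritten term, then build the query.
        SynonymQuery.Builder builder = new SynonymQuery.Builder(prefixField);
        builder.addTerm(new Term(prefixField, "quick"));
        builder.addTerm(new Term(prefixField, "fast"));
        Query q = builder.build();

        System.out.println(q); // e.g. Synonym(field._index_prefix:fast field._index_prefix:quick)
    }
}

One advantage of the builder form is that it can also attach per-term boosts via addTerm(Term, float), which the array constructor could not express.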