Fix search_as_you_type field with term_vector (#66432)
This commit fixes a bug in the search_as_you_type field that was introduced during
the refactoring of the field mapper. The prefix field that is used internally
by the search_as_you_type mapper doesn't need term vectors even if they are enabled
on the main field. So this commit ensures that we don't copy the term vector options
from the main field when we create the prefix sub-field.

Closes #66407
jimczi committed Dec 16, 2020
1 parent 9432aea commit 9431186
Showing 2 changed files with 44 additions and 17 deletions.
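
Background sketch (not part of the commit): the heart of the fix is how the prefix sub-field's Lucene FieldType is built. My understanding is that the old code copied the main field's FieldType wholesale and then only switched off the top-level term vector flag, so per-flag options such as positions and offsets could stay set and leave the prefix field in an inconsistent state once term_vector was enabled on the main field, which Lucene rejects at index time; the new code builds the prefix FieldType from scratch and copies only the index options. The minimal, self-contained Java sketch below illustrates the difference using Lucene's FieldType API directly; the class name and the with_positions_offsets example are illustrative assumptions, not code from the commit.

    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.index.IndexOptions;

    public class PrefixFieldTypeSketch {
        public static void main(String[] args) {
            // Main field roughly as it would look with "term_vector": "with_positions_offsets".
            FieldType mainField = new FieldType();
            mainField.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
            mainField.setStoreTermVectors(true);
            mainField.setStoreTermVectorPositions(true);
            mainField.setStoreTermVectorOffsets(true);

            // Old approach: copy the whole FieldType, then switch off term vectors.
            FieldType copied = new FieldType(mainField);
            copied.setStoreTermVectors(false);
            // The positions/offsets flags copied from the main field are still set,
            // so the field type is now internally inconsistent.
            System.out.println(copied.storeTermVectors());           // false
            System.out.println(copied.storeTermVectorPositions());   // true

            // New approach (as in the diff below): start from an empty FieldType and
            // copy only the index options, so no term vector flag leaks into the prefix field.
            FieldType prefixft = new FieldType();
            prefixft.setIndexOptions(mainField.indexOptions());
            prefixft.setOmitNorms(true);
            prefixft.setStored(false);
            System.out.println(prefixft.storeTermVectors());         // false
            System.out.println(prefixft.storeTermVectorPositions()); // false
        }
    }

The diff below removes the copy entirely, and the updated test exercises every term_vector option rather than only "yes".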
SearchAsYouTypeFieldMapper.java
@@ -166,8 +166,8 @@ public SearchAsYouTypeFieldMapper build(ContentPath contentPath) {
indexAnalyzers.put(ft.name(), indexAnalyzer);

// set up the prefix field
-FieldType prefixft = new FieldType(fieldType);
-prefixft.setStoreTermVectors(false);
+FieldType prefixft = new FieldType();
+prefixft.setIndexOptions(fieldType.indexOptions());
prefixft.setOmitNorms(true);
prefixft.setStored(false);
final String fullName = buildFullName(contentPath);
@@ -179,10 +179,8 @@ public SearchAsYouTypeFieldMapper build(ContentPath contentPath) {
SearchAsYouTypeAnalyzer.withShingle(searchAnalyzer.analyzer(), maxShingleSize.getValue()));
// don't wrap the root field's search quote analyzer as prefix field doesn't support phrase queries
TextSearchInfo prefixSearchInfo = new TextSearchInfo(prefixft, similarity.getValue(), prefixSearchWrapper, searchAnalyzer);
-final PrefixFieldType prefixFieldType
-    = new PrefixFieldType(fullName, prefixSearchInfo, Defaults.MIN_GRAM, Defaults.MAX_GRAM);
-final NamedAnalyzer prefixAnalyzer
-    = new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, prefixIndexWrapper);
+final PrefixFieldType prefixFieldType = new PrefixFieldType(fullName, prefixSearchInfo, Defaults.MIN_GRAM, Defaults.MAX_GRAM);
+final NamedAnalyzer prefixAnalyzer = new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, prefixIndexWrapper);
final PrefixFieldMapper prefixFieldMapper = new PrefixFieldMapper(prefixft, prefixFieldType);
indexAnalyzers.put(prefixFieldType.name(), prefixAnalyzer);

@@ -206,8 +204,7 @@ public SearchAsYouTypeFieldMapper build(ContentPath contentPath) {
final ShingleFieldType shingleFieldType = new ShingleFieldType(fieldName, shingleSize, textSearchInfo);
shingleFieldType.setPrefixFieldType(prefixFieldType);
shingleFieldTypes[i] = shingleFieldType;
-NamedAnalyzer shingleAnalyzer
-    = new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, shingleIndexWrapper);
+NamedAnalyzer shingleAnalyzer = new NamedAnalyzer(indexAnalyzer.name(), AnalyzerScope.INDEX, shingleIndexWrapper);
shingleFieldMappers[i] = new ShingleFieldMapper(shingleft, shingleFieldType);
indexAnalyzers.put(shingleFieldType.name(), shingleAnalyzer);
}
SearchAsYouTypeFieldMapperTests.java
@@ -332,18 +332,48 @@ public void testStoredOnly() throws IOException {
}

public void testTermVectors() throws IOException {
-DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type").field("term_vector", "yes")));
-ParsedDocument doc = mapper.parse(source(b -> b.field("field", "some text")));
+for (String termVector : new String[] { "yes", "with_positions", "with_offsets", "with_positions_offsets",
+        "with_positions_payloads", "with_positions_offsets_payloads"}) {
+    DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "search_as_you_type")
+        .field("term_vector", termVector)));
+    ParsedDocument doc = mapper.parse(source(b -> b.field("field", "some text")));

-assertTrue(fieldType(doc, "field").storeTermVectors());
+    IndexableFieldType rootField = fieldType(doc, "field");
+    assertTrue(rootField.storeTermVectors());
+    if (termVector.contains("positions")) {
+        assertThat(rootField.storeTermVectorPositions(), equalTo(termVector.contains("positions")));
+    }
+    if (termVector.contains("offsets")) {
+        assertTrue(rootField.storeTermVectorOffsets());
+        assertThat(rootField.storeTermVectorOffsets(), equalTo(termVector.contains("offsets")));
+    }
+    if (termVector.contains("payloads")) {
+        assertTrue(rootField.storeTermVectorPayloads());
+        assertThat(rootField.storeTermVectorPayloads(), equalTo(termVector.contains("payloads")));
+    }

-Stream.of(
-    fieldType(doc, "field._2gram"),
-    fieldType(doc, "field._3gram")
-).forEach(ft -> assertTrue(ft.storeTermVectors()));
+    Stream.of(
+        fieldType(doc, "field._2gram"),
+        fieldType(doc, "field._3gram")
+    ).forEach(ft -> {
+        assertTrue(ft.storeTermVectors());
+        if (termVector.contains("positions")) {
+            assertThat(ft.storeTermVectorPositions(), equalTo(termVector.contains("positions")));
+        }
+        if (termVector.contains("offsets")) {
+            assertThat(ft.storeTermVectorOffsets(), equalTo(termVector.contains("offsets")));
+        }
+        if (termVector.contains("payloads")) {
+            assertThat(ft.storeTermVectorPayloads(), equalTo(termVector.contains("payloads")));
+        }
+    });

-PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(mapper, "field._index_prefix");
-assertFalse(prefixFieldMapper.fieldType.storeTermVectors());
+    PrefixFieldMapper prefixFieldMapper = getPrefixFieldMapper(mapper, "field._index_prefix");
+    assertFalse(prefixFieldMapper.fieldType.storeTermVectors());
+    assertFalse(prefixFieldMapper.fieldType.storeTermVectorOffsets());
+    assertFalse(prefixFieldMapper.fieldType.storeTermVectorPositions());
+    assertFalse(prefixFieldMapper.fieldType.storeTermVectorPayloads());
+}
}

public void testNorms() throws IOException {
