Skip to content

Commit

Permalink
Make full text queries work against keyword scripted fields (#59990)
Browse files Browse the repository at this point in the history
The keyword scripted field should always apply the keyword analyzer, by providing it as part of the TextSearchInfo passed to the base mapped field type class. That is because the field does not support normalization; if we do not force the keyword analyzer, the default search analyzer is picked up instead, which lowercases the query terms and causes full-text queries to miss exact keyword values.

Relates to #59332
  • Loading branch information
javanna authored Jul 21, 2020
1 parent 26506c4 commit bc3267b
Show file tree
Hide file tree
Showing 4 changed files with 56 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ abstract class AbstractScriptMappedFieldType extends MappedFieldType {
protected final Script script;

AbstractScriptMappedFieldType(String name, Script script, Map<String, String> meta) {
super(name, false, false, TextSearchInfo.NONE, meta);
super(name, false, false, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.script = script;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

Expand All @@ -19,12 +21,20 @@ protected QueryShardContext mockContext() {
return mockContext(true);
}

protected QueryShardContext mockContext(boolean allowExpensiveQueries) {
/**
 * Builds a mocked {@link QueryShardContext} with no mapped field type registered.
 *
 * @param allowExpensiveQueries value returned by {@code context.allowExpensiveQueries()}
 * @return a Mockito mock configured by {@link #mockContext(boolean, AbstractScriptMappedFieldType)}
 */
protected static QueryShardContext mockContext(boolean allowExpensiveQueries) {
// Delegate with a null field type: field-type-dependent stubs are skipped.
return mockContext(allowExpensiveQueries, null);
}

/**
 * Builds a mocked {@link QueryShardContext} wired to resolve every field name to the
 * given scripted field type, so that query builders (e.g. match queries) can be
 * converted to Lucene queries against it in tests.
 *
 * @param allowExpensiveQueries value returned by {@code context.allowExpensiveQueries()}
 * @param mappedFieldType the scripted field type to return for any field name, or
 *     {@code null} to leave field lookups unstubbed
 * @return the configured mock context
 */
protected static QueryShardContext mockContext(boolean allowExpensiveQueries, AbstractScriptMappedFieldType mappedFieldType) {
MapperService mapperService = mock(MapperService.class);
// Any field name resolves to the provided type (may be null).
when(mapperService.fieldType(anyString())).thenReturn(mappedFieldType);
QueryShardContext context = mock(QueryShardContext.class);
if (mappedFieldType != null) {
when(context.fieldMapper(anyString())).thenReturn(mappedFieldType);
// Surface the field's own search analyzer (the keyword analyzer for scripted
// keyword fields) so query parsing does not fall back to a default analyzer.
when(context.getSearchAnalyzer(any())).thenReturn(mappedFieldType.getTextSearchInfo().getSearchAnalyzer());
}
when(context.allowExpensiveQueries()).thenReturn(allowExpensiveQueries);
// Lookup backed by the mock mapper service; no doc values needed (mft -> null).
when(context.lookup()).thenReturn(new SearchLookup(mapperService, mft -> null));
return context;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.store.Directory;
Expand All @@ -23,6 +24,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.painless.PainlessPlugin;
import org.elasticsearch.painless.PainlessScriptEngine;
Expand Down Expand Up @@ -248,15 +250,29 @@ public void testWildcardQueryIsExpensive() throws IOException {
checkExpensiveQuery((ft, ctx) -> ft.wildcardQuery(randomAlphaOfLengthBetween(1, 1000), null, ctx));
}

private ScriptKeywordMappedFieldType build(String code) throws IOException {
/**
 * Verifies that a match query against a keyword scripted field matches exactly one
 * document: the script produces {@code "<foo>-Suffix"} per document, and the query
 * text {@code "1-Suffix"} must be analyzed with the keyword analyzer (not lowercased
 * or tokenized) for the match to hit only the first document.
 */
public void testMatchQuery() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
// Two source-only documents; the field value is computed at query time by the script.
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": 1}"))));
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": 2}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
// Script emits source.foo concatenated with the "-Suffix" parameter.
ScriptKeywordMappedFieldType fieldType = build("value(source.foo.toString() + params.param)", Map.of("param", "-Suffix"));
// Context resolves any field name ("test") to the scripted field type.
QueryShardContext queryShardContext = mockContext(true, fieldType);
Query query = new MatchQueryBuilder("test", "1-Suffix").toQuery(queryShardContext);
// Only the document with foo == 1 produces "1-Suffix".
assertThat(searcher.count(query), equalTo(1));
}
}
}

/**
 * Builds a scripted keyword field type from inline script source with no parameters.
 *
 * @param code the script source
 */
private static ScriptKeywordMappedFieldType build(String code) throws IOException {
return build(new Script(code));
}

private ScriptKeywordMappedFieldType build(String code, Map<String, Object> params) throws IOException {
/**
 * Builds a scripted keyword field type from an inline Painless script with parameters.
 *
 * @param code the Painless script source
 * @param params parameters made available to the script as {@code params}
 */
private static ScriptKeywordMappedFieldType build(String code, Map<String, Object> params) throws IOException {
return build(new Script(ScriptType.INLINE, PainlessScriptEngine.NAME, code, params));
}

private ScriptKeywordMappedFieldType build(Script script) throws IOException {
private static ScriptKeywordMappedFieldType build(Script script) throws IOException {
PainlessPlugin painlessPlugin = new PainlessPlugin();
painlessPlugin.loadExtensions(new ExtensionLoader() {
@Override
Expand All @@ -272,7 +288,7 @@ public <T> List<T> loadExtensions(Class<T> extensionPointType) {
}
}

private void checkExpensiveQuery(BiConsumer<ScriptKeywordMappedFieldType, QueryShardContext> queryBuilder) throws IOException {
private static void checkExpensiveQuery(BiConsumer<ScriptKeywordMappedFieldType, QueryShardContext> queryBuilder) throws IOException {
ScriptKeywordMappedFieldType ft = build("value('cat')");
Exception e = expectThrows(ElasticsearchException.class, () -> queryBuilder.accept(ft, mockContext(false)));
assertThat(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,29 @@ setup:
- match: {hits.total.value: 1}
- match: {hits.hits.0._source.voltage: 5.8}

---
# Match query against the scripted keyword field. The first search must hit the
# single document whose day_of_week is "Monday": the field's keyword analyzer
# keeps the query text intact. The second search forces the standard analyzer,
# which lowercases "Monday" to "monday", so nothing matches — proving that the
# keyword analyzer is what makes the first query work.
"match query":
  - do:
      search:
        index: sensor
        body:
          query:
            match:
              day_of_week: Monday
  - match: {hits.total.value: 1}
  - match: {hits.hits.0._source.voltage: 5.8}

  - do:
      search:
        index: sensor
        body:
          query:
            match:
              day_of_week:
                query: Monday
                analyzer: standard
  - match: {hits.total.value: 0}

---
"terms query":
- do:
Expand Down

0 comments on commit bc3267b

Please sign in to comment.