Implement fields fetch for runtime fields (#61995)
This implements the `fields` API in `_search` for runtime fields using
doc values. Most of that implementation is stolen from the
`docvalue_fields` fetch sub-phase, just moved into the same API that the
`fields` API uses. At this point the `docvalue_fields` fetch phase looks
like a special case of the `fields` API.
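
To make the mechanics concrete, here is a minimal sketch (not code from this
commit) of what a doc-values-backed fetch boils down to: advance the leaf's doc
values to the hit and format each value for the response. The nested `Leaf`
interface mirrors the `DocValueFetcher.Leaf` contract added in this change
(`advanceExact`, `docValueCount`, `nextValue`); the class and method names
around it are illustrative only.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    class DocValueFetchSketch {
        /** Mirrors DocValueFetcher.Leaf from this commit; local copy for illustration. */
        interface Leaf {
            boolean advanceExact(int docId) throws IOException;
            int docValueCount() throws IOException;
            Object nextValue() throws IOException;
        }

        /** Collects the formatted doc values for a single hit. */
        static List<Object> fetchValues(Leaf leaf, int docId) throws IOException {
            if (leaf.advanceExact(docId) == false) {
                return List.of();                 // no values for this document
            }
            int count = leaf.docValueCount();
            List<Object> values = new ArrayList<>(count);
            for (int i = 0; i < count; i++) {
                values.add(leaf.nextValue());     // values come back already formatted
            }
            return values;
        }
    }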

While I was at it, I moved the "which doc values sub-implementation
should I use for fetching?" question from a bunch of `instanceof`s to a
method on `LeafFieldData`, so we can be much more flexible about what is
returned and we're not forced to extend certain classes just to make the
fetch phase happy.
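
A hedged caller-side sketch of that change follows. Only
`LeafFieldData#getLeafValueFetcher`, `DocValueFetcher.Leaf`, and
`DocValueFormat` below come from this diff; `leafFieldData`, `format`, and
`docId` are assumed to be supplied by the surrounding fetch-phase code, and
`collect` stands in for however the hit's response gets assembled.

    // Before (roughly): the docvalue_fields phase picked a value-reading strategy
    // with a chain of instanceof checks against concrete LeafFieldData classes.
    // After: the field data supplies its own fetcher, so the caller only needs:
    DocValueFetcher.Leaf leaf = leafFieldData.getLeafValueFetcher(format);
    if (leaf.advanceExact(docId)) {
        for (int i = 0; i < leaf.docValueCount(); i++) {
            collect(leaf.nextValue());   // formatted according to the field's DocValueFormat
        }
    }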

Relates to #59332
nik9000 authored Sep 15, 2020
1 parent 5075e83 commit 9a127ad
Showing 107 changed files with 640 additions and 341 deletions.
@@ -183,7 +183,7 @@ private Float objectToFloat(Object value) {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -162,7 +162,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -397,7 +397,7 @@ private static double objectToDouble(Object value) {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -54,6 +54,7 @@
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.similarity.SimilarityProvider;
 import org.elasticsearch.index.similarity.SimilarityService;
+import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -419,7 +420,7 @@ protected void parseCreateField(ParseContext context) {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         throw new UnsupportedOperationException();
     }
 
@@ -465,7 +466,7 @@ protected void mergeOptions(FieldMapper other, List<String> conflicts) {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         throw new UnsupportedOperationException();
     }
 
@@ -588,7 +589,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         throw new UnsupportedOperationException();
     }

@@ -25,6 +25,7 @@
 import org.apache.lucene.document.FieldType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -159,7 +160,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -147,7 +147,7 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOException
             e.getCause().getMessage());
     }
 
-    public void testFetchSourceValue() {
+    public void testFetchSourceValue() throws IOException {
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
         Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
         RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context);

@@ -259,7 +259,7 @@ public void testRejectIndexOptions() {
             containsString("Failed to parse mapping: unknown parameter [index_options] on mapper [field] of type [scaled_float]"));
     }
 
-    public void testFetchSourceValue() {
+    public void testFetchSourceValue() throws IOException {
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
         Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
 
@@ -140,7 +140,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         throw new UnsupportedOperationException("Cannot fetch values for metadata field [" + typeName() + "].");
     }

@@ -190,7 +190,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
     }

@@ -352,7 +352,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -79,6 +79,7 @@
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.index.query.Rewriteable;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -366,7 +367,7 @@ public void parse(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -34,6 +34,8 @@
 import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.search.lookup.SourceLookup;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -54,7 +56,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
     }
 
     @Override
-    public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException {
+    public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
         if (searchContext.highlight() == null) {
             return null;
         }
@@ -95,7 +97,7 @@ public void process(HitContext hit) throws IOException {
             BytesReference document = percolateQuery.getDocuments().get(slot);
             HitContext subContext = new HitContext(
                 new SearchHit(slot, "unknown", Collections.emptyMap(), Collections.emptyMap()),
-                percolatorLeafReaderContext, slot, new HashMap<>()
+                percolatorLeafReaderContext, slot, new SourceLookup(), new HashMap<>()
             );
             subContext.sourceLookup().setSource(document);
             // force source because MemoryIndex does not store fields

@@ -36,6 +36,7 @@
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -56,7 +57,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
     static final String FIELD_NAME_PREFIX = "_percolator_document_slot";
 
     @Override
-    public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException {
+    public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
 
         List<PercolateContext> percolateContexts = new ArrayList<>();
         List<PercolateQuery> percolateQueries = locatePercolatorQuery(searchContext.query());

@@ -52,9 +52,9 @@ public void testHitsExecutionNeeded() throws IOException {
         Mockito.when(searchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
         Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());
 
-        assertNull(subFetchPhase.getProcessor(searchContext));
+        assertNull(subFetchPhase.getProcessor(searchContext, null));
         Mockito.when(searchContext.query()).thenReturn(percolateQuery);
-        assertNotNull(subFetchPhase.getProcessor(searchContext));
+        assertNotNull(subFetchPhase.getProcessor(searchContext, null));
     }
 
     public void testLocatePercolatorQuery() {

@@ -40,6 +40,7 @@
 import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
 import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.lookup.SourceLookup;
 import org.elasticsearch.test.ESTestCase;
 
 import java.util.Collections;
@@ -66,7 +67,7 @@ public void testHitsExecute() throws Exception {
         LeafReaderContext context = reader.leaves().get(0);
         // A match:
         {
-            HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
+            HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
             PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
             MemoryIndex memoryIndex = new MemoryIndex();
             memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
@@ -77,7 +78,7 @@
             SearchContext sc = mock(SearchContext.class);
             when(sc.query()).thenReturn(percolateQuery);
 
-            FetchSubPhaseProcessor processor = phase.getProcessor(sc);
+            FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
             assertNotNull(processor);
             processor.process(hit);
 
@@ -87,7 +88,7 @@
 
         // No match:
         {
-            HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
+            HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
             PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
             MemoryIndex memoryIndex = new MemoryIndex();
             memoryIndex.addField("field", "value1", new WhitespaceAnalyzer());
@@ -98,7 +99,7 @@
             SearchContext sc = mock(SearchContext.class);
             when(sc.query()).thenReturn(percolateQuery);
 
-            FetchSubPhaseProcessor processor = phase.getProcessor(sc);
+            FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
             assertNotNull(processor);
             processor.process(hit);
 
@@ -107,7 +108,7 @@
 
         // No query:
         {
-            HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
+            HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
             PercolateQuery.QueryStore queryStore = ctx -> docId -> null;
             MemoryIndex memoryIndex = new MemoryIndex();
             memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
@@ -118,7 +119,7 @@
             SearchContext sc = mock(SearchContext.class);
             when(sc.query()).thenReturn(percolateQuery);
 
-            FetchSubPhaseProcessor processor = phase.getProcessor(sc);
+            FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
             assertNotNull(processor);
             processor.process(hit);

@@ -735,7 +735,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -317,7 +317,7 @@ public void testUpdateIgnoreAbove() throws IOException {
         assertEquals(0, fields.length);
     }
 
-    public void testFetchSourceValue() {
+    public void testFetchSourceValue() throws IOException {
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
         Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
 
@@ -678,7 +678,7 @@ public void testEmptyName() throws IOException {
         assertThat(e.getMessage(), containsString("name cannot be empty string"));
     }
 
-    public void testFetchSourceValue() {
+    public void testFetchSourceValue() throws IOException {
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
         Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
 
@@ -46,6 +46,7 @@
 import org.elasticsearch.index.mapper.ValueFetcher;
 import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
 import org.elasticsearch.index.similarity.SimilarityProvider;
+import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.io.IOException;
 import java.io.Reader;
@@ -589,7 +590,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -153,7 +153,7 @@ protected void parseCreateField(ParseContext context)
     }
 
     @Override
-    public ValueFetcher valueFetcher(MapperService mapperService, String format) {
+    public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
         if (format != null) {
             throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
         }

@@ -110,8 +110,15 @@ setup:
           rest_total_hits_as_int: true
           index: date*
           body:
-            docvalue_fields: [ { "field": "date", "format" : "strict_date_optional_time" }, { "field": "date", "format": "epoch_millis" }, { "field" : "date", "format": "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSX" } ]
-            sort: [ { "date": "desc" } ]
+            docvalue_fields:
+              - field: date
+                format: strict_date_optional_time
+              - field: date
+                format: epoch_millis
+              - field: date
+                format: uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSX
+            sort:
+              - date: desc
 
   - match: { hits.total: 2 }
   - length: { hits.hits: 2 }

@@ -39,6 +39,7 @@
 import org.elasticsearch.search.SearchExtBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -114,7 +115,7 @@ private static final class TermVectorsFetchSubPhase implements FetchSubPhase {
         private static final String NAME = "term_vectors_fetch";
 
         @Override
-        public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) {
+        public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
             return new FetchSubPhaseProcessor() {
                 @Override
                 public void setNextReader(LeafReaderContext readerContext) {

@@ -32,10 +32,10 @@
 import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.MultiValueMode;
-import org.elasticsearch.search.sort.BucketedSort;
-import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
+import org.elasticsearch.search.sort.BucketedSort;
+import org.elasticsearch.search.sort.SortOrder;
 
 import java.io.IOException;
 import java.util.function.LongUnaryOperator;

@@ -21,6 +21,10 @@
 
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.lease.Releasable;
+import org.elasticsearch.index.mapper.DocValueFetcher;
+import org.elasticsearch.search.DocValueFormat;
+
+import java.io.IOException;
 
 /**
  * The thread safe {@link org.apache.lucene.index.LeafReader} level cache of the data.
@@ -37,4 +41,26 @@ public interface LeafFieldData extends Accountable, Releasable {
      */
     SortedBinaryDocValues getBytesValues();
 
+    /**
+     * Return a value fetcher for this leaf implementation.
+     */
+    default DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
+        SortedBinaryDocValues values = getBytesValues();
+        return new DocValueFetcher.Leaf() {
+            @Override
+            public boolean advanceExact(int docId) throws IOException {
+                return values.advanceExact(docId);
+            }
+
+            @Override
+            public int docValueCount() throws IOException {
+                return values.docValueCount();
+            }
+
+            @Override
+            public Object nextValue() throws IOException {
+                return format.format(values.nextValue());
+            }
+        };
+    }
 }