Skip to content

Commit

Permalink
Add _ignored and _routing metadata fields to fields api (elastic#…
Browse files Browse the repository at this point in the history
…78981)

Currently we don't allow retrieving metadata fields through the fields option in
search but throw an error on this case. In elastic#78828 we started to enable this for
`_id` if the field is explicitly requested. This PR adds `_ignored` and
`_routing` metadata fields which are also internally handled as stored fields to
the list of fields that can be explicitly retrieved.
  • Loading branch information
Christoph Büscher committed Oct 14, 2021
1 parent 7b66bfb commit 58aa5f7
Show file tree
Hide file tree
Showing 6 changed files with 143 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1012,7 +1012,7 @@ error for flattened includes whole path:
test fetching metadata fields:
- skip:
version: ' - 7.15.99'
reason: 'fetching metadata via fields introduced in 7.16'
reason: 'fetching metadata via fields was introduced in 7.16'

- do:
indices.create:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ public Query existsQuery(SearchExecutionContext context) {

@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
    // _ignored is kept as a stored field, so its values can be loaded from
    // stored fields rather than rejected with UnsupportedOperationException.
    // (The rendered diff left the old `throw` line above the new `return`,
    // which is an unreachable statement and does not compile; only the
    // StoredValueFetcher line belongs in the post-change source.)
    return new StoredValueFetcher(context.lookup(), NAME);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ public String typeName() {

@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
    // _routing is kept as a stored field, so its value can be loaded from
    // stored fields rather than rejected with UnsupportedOperationException.
    // (The rendered diff left the old `throw` line above the new `return`,
    // which is an unreachable statement and does not compile; only the
    // StoredValueFetcher line belongs in the post-change source.)
    return new StoredValueFetcher(context.lookup(), NAME);
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.index.mapper;

import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.core.List;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;

import static org.hamcrest.Matchers.containsString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for the {@code _ignored} metadata field mapper: it must be rejected when
 * supplied inside a document, it is indexed (docs only) and stored by default,
 * and its stored value can be retrieved through a {@link ValueFetcher}.
 */
public class IgnoredFieldMapperTests extends MapperServiceTestCase {

    public void testIncludeInObjectNotAllowed() throws Exception {
        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));

        // _ignored is metadata-only; providing it in the document source must fail parsing
        Exception exception = expectThrows(
            MapperParsingException.class,
            () -> mapper.parse(source(b -> b.field("_ignored", 1)))
        );

        assertThat(
            exception.getCause().getMessage(),
            containsString("Field [_ignored] is a metadata field and cannot be added inside a document")
        );
    }

    public void testDefaults() throws IOException {
        // "value" exceeds ignore_above=3, so the field name is recorded under _ignored
        DocumentMapper mapper = createDocumentMapper(
            mapping(b -> b.startObject("field").field("type", "keyword").field("ignore_above", 3).endObject())
        );
        ParsedDocument parsedDoc = mapper.parse(source(b -> b.field("field", "value")));

        IndexableField[] ignoredEntries = parsedDoc.rootDoc().getFields(IgnoredFieldMapper.NAME);
        assertEquals(1, ignoredEntries.length);
        // indexed with DOCS only and stored, so the value is retrievable later
        assertEquals(IndexOptions.DOCS, ignoredEntries[0].fieldType().indexOptions());
        assertTrue(ignoredEntries[0].fieldType().stored());
    }

    public void testFetchIgnoredFieldValue() throws IOException {
        MapperService mapperService = createMapperService(
            fieldMapping(b -> b.field("type", "keyword").field("ignore_above", 3))
        );
        withLuceneIndex(mapperService, iw -> {
            iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field("field", "value"))).rootDoc());
        }, iw -> {
            SearchLookup searchLookup = new SearchLookup(mapperService::fieldType, fieldDataLookup());
            SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
            when(searchExecutionContext.lookup()).thenReturn(searchLookup);

            IgnoredFieldMapper.IgnoredFieldType fieldType =
                (IgnoredFieldMapper.IgnoredFieldType) mapperService.fieldType("_ignored");
            ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, null);

            IndexSearcher indexSearcher = newSearcher(iw);
            LeafReaderContext leafContext = indexSearcher.getIndexReader().leaves().get(0);
            searchLookup.source().setSegmentAndDocument(leafContext, 0);
            fetcher.setNextReader(leafContext);

            // the over-long "field" value was ignored, so fetching _ignored yields that field's name
            assertEquals(List.of("field"), fetcher.fetchValues(searchLookup.source(), List.of()));
        });
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@

package org.elasticsearch.index.mapper;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.core.List;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;

Expand All @@ -17,6 +22,8 @@

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class RoutingFieldMapperTests extends MetadataMapperTestCase {

Expand Down Expand Up @@ -52,4 +59,22 @@ public void testIncludeInObjectNotAllowed() throws Exception {
assertThat(e.getCause().getMessage(),
containsString("Field [_routing] is a metadata field and cannot be added inside a document"));
}

/**
 * Indexes a document with an explicit routing value and verifies that the
 * {@code _routing} field type's {@link ValueFetcher} reads it back from the
 * stored field.
 */
public void testFetchRoutingFieldValue() throws IOException {
    MapperService mapperService = createMapperService(mapping(b -> {}));
    withLuceneIndex(mapperService, iw -> {
        // document id "1", empty source, routing value "abcd"
        iw.addDocument(mapperService.documentMapper().parse(source("1", b -> {}, "abcd")).rootDoc());
    }, iw -> {
        SearchLookup searchLookup = new SearchLookup(mapperService::fieldType, fieldDataLookup());
        SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
        when(searchExecutionContext.lookup()).thenReturn(searchLookup);

        RoutingFieldMapper.RoutingFieldType fieldType =
            (RoutingFieldMapper.RoutingFieldType) mapperService.fieldType("_routing");
        ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, null);

        IndexSearcher indexSearcher = newSearcher(iw);
        LeafReaderContext leafContext = indexSearcher.getIndexReader().leaves().get(0);
        searchLookup.source().setSegmentAndDocument(leafContext, 0);
        fetcher.setNextReader(leafContext);

        // the routing value supplied at index time comes back through the fetcher
        assertEquals(List.of("abcd"), fetcher.fetchValues(searchLookup.source(), List.of()));
    });
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -8,27 +8,34 @@

package org.elasticsearch.search.fetch.subphase;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperServiceTestCase;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.xcontent.ObjectPath.eval;
Expand Down Expand Up @@ -190,9 +197,37 @@ public void testMetadataFields() throws IOException {
assertEquals(100, ((Integer) fields.get("_doc_count").getValue()).intValue());

// several other metadata fields throw exceptions via their value fetchers when trying to get them
for (String fieldname : new String[] { "_index", "_seq_no", "_routing", "_ignored" }) {
for (String fieldname : org.elasticsearch.core.List.of("_index", "_seq_no")) {
expectThrows(UnsupportedOperationException.class, () -> fetchFields(mapperService, source, fieldname));
}

String docId = randomAlphaOfLength(12);
String routing = randomAlphaOfLength(12);
withLuceneIndex(mapperService, iw -> {
iw.addDocument(mapperService.documentMapper().parse(source(docId, b -> b.field("integer_field", "value"), routing)).rootDoc());
}, iw -> {
List<FieldAndFormat> fieldList = org.elasticsearch.core.List.of(
new FieldAndFormat("_id", null),
new FieldAndFormat("_routing", null),
new FieldAndFormat("_ignored", null)
);
FieldFetcher fieldFetcher = FieldFetcher.create(
newSearchExecutionContext(mapperService, (ft, index, sl) -> fieldDataLookup().apply(ft, sl)),
fieldList
);
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext);

SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSegmentAndDocument(readerContext, 0);

Map<String, DocumentField> fetchedFields = fieldFetcher.fetch(sourceLookup);
assertThat(fetchedFields.size(), equalTo(3));
assertEquals(docId, fetchedFields.get("_id").getValue());
assertEquals(routing, fetchedFields.get("_routing").getValue());
assertEquals("integer_field", fetchedFields.get("_ignored").getValue());
});
}

public void testFetchAllFields() throws IOException {
Expand Down Expand Up @@ -950,7 +985,7 @@ public MapperService createMapperService() throws IOException {
.startObject("_doc")
.startObject("properties")
.startObject("field").field("type", "keyword").endObject()
.startObject("integer_field").field("type", "integer").endObject()
.startObject("integer_field").field("type", "integer").field("ignore_malformed", "true").endObject()
.startObject("date_field").field("type", "date").endObject()
.startObject("geo_point").field("type", "geo_point").endObject()
.startObject("float_range").field("type", "float_range").endObject()
Expand All @@ -969,6 +1004,13 @@ public MapperService createMapperService() throws IOException {
}

/**
 * Creates a {@link SearchExecutionContext} for the given mapper service without an
 * index field data lookup; delegates to the two-argument overload passing {@code null}.
 */
private static SearchExecutionContext newSearchExecutionContext(MapperService mapperService) {
return newSearchExecutionContext(mapperService, null);
}

private static SearchExecutionContext newSearchExecutionContext(
MapperService mapperService,
TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> indexFieldDataLookup
) {
Settings settings = Settings.builder().put("index.version.created", Version.CURRENT)
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
Expand All @@ -980,7 +1022,7 @@ private static SearchExecutionContext newSearchExecutionContext(MapperService ma
0,
indexSettings,
null,
null,
indexFieldDataLookup,
mapperService,
mapperService.mappingLookup(),
null,
Expand Down

0 comments on commit 58aa5f7

Please sign in to comment.