Make Document a top-level class (elastic#74472)
There is no reason for Document to be an inner class of ParseContext, especially as it is public and accessed directly from many different places.

This commit moves it out into its own top-level class file, renamed LuceneDocument. That also has the advantage of simplifying ParseContext, which could use some love too.
javanna authored Jun 24, 2021
1 parent d851c3c commit 7d0ef61
Showing 48 changed files with 331 additions and 318 deletions.
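
The mechanical core of the change is small. Here is a minimal sketch of the resulting top-level class, assuming a much simplified shape (the real LuceneDocument carries many more members and helpers):

package org.elasticsearch.index.mapper;

import org.apache.lucene.index.IndexableField;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Formerly the public static inner class ParseContext.Document; as a
// top-level class, callers can depend on the document abstraction without
// importing ParseContext at all.
public class LuceneDocument implements Iterable<IndexableField> {

    private final List<IndexableField> fields = new ArrayList<>();

    public void add(IndexableField field) {
        fields.add(field);
    }

    public IndexableField getField(String name) {
        for (IndexableField field : fields) {
            if (field.name().equals(name)) {
                return field;
            }
        }
        return null;
    }

    @Override
    public Iterator<IndexableField> iterator() {
        return fields.iterator();
    }
}

Call sites then change mechanically: every ParseContext.Document reference becomes LuceneDocument, and imports of ParseContext that existed only for the inner class are swapped for org.elasticsearch.index.mapper.LuceneDocument. In the hunks below, removed lines are prefixed with - and added lines with +.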
@@ -38,7 +38,7 @@ protected void registerParameters(ParameterChecker checker) throws IOException {
}

@Override
-protected void assertExistsQuery(MappedFieldType fieldType, Query query, ParseContext.Document fields) {
+protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) {
assertThat(query, instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) query;
assertEquals("_feature", termQuery.getTerm().field());
@@ -139,19 +139,19 @@ public TokenStreamComponents createComponents(String fieldName) {

public void testParseNullValue() throws Exception {
DocumentMapper mapper = createIndexWithTokenCountField();
-ParseContext.Document doc = parseDocument(mapper, createDocument(null));
+LuceneDocument doc = parseDocument(mapper, createDocument(null));
assertNull(doc.getField("test.tc"));
}

public void testParseEmptyValue() throws Exception {
DocumentMapper mapper = createIndexWithTokenCountField();
-ParseContext.Document doc = parseDocument(mapper, createDocument(""));
+LuceneDocument doc = parseDocument(mapper, createDocument(""));
assertEquals(0, doc.getField("test.tc").numericValue());
}

public void testParseNotNullValue() throws Exception {
DocumentMapper mapper = createIndexWithTokenCountField();
-ParseContext.Document doc = parseDocument(mapper, createDocument("three tokens string"));
+LuceneDocument doc = parseDocument(mapper, createDocument("three tokens string"));
assertEquals(3, doc.getField("test.tc").numericValue());
}

@@ -179,7 +179,7 @@ private SourceToParse createDocument(String fieldValue) throws Exception {
return source(b -> b.field("test", fieldValue));
}

-private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) {
+private LuceneDocument parseDocument(DocumentMapper mapper, SourceToParse request) {
return mapper.parse(request)
.docs().stream().findFirst().orElseThrow(() -> new IllegalStateException("Test object not parsed"));
}
@@ -56,8 +56,8 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
+import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.query.AbstractQueryBuilder;
@@ -596,7 +596,7 @@ static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection<P
Directory directory = new ByteBuffersDirectory();
try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(analyzer))) {
// Indexing in order here, so that the user provided order matches with the docid sequencing:
-Iterable<ParseContext.Document> iterable = () -> docs.stream()
+Iterable<LuceneDocument> iterable = () -> docs.stream()
.map(ParsedDocument::docs)
.flatMap(Collection::stream)
.iterator();
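
A note on the lambda above: IndexWriter#addDocuments expects an Iterable, and a java.util.stream.Stream is not one, so the lambda adapts the stream pipeline to an Iterable whose iterator() re-runs the pipeline on every call. A hedged sketch of the pattern in isolation (the class and helper names are assumptions):

import java.util.Collection;

import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.ParsedDocument;

class OrderedDocsSketch {
    // Each ParsedDocument may expand to several LuceneDocuments (for example
    // nested docs), so the per-document lists are flattened in user-provided
    // order, letting Lucene assign contiguous doc IDs within the block.
    Iterable<LuceneDocument> flatten(Collection<ParsedDocument> docs) {
        return () -> docs.stream()
            .map(ParsedDocument::docs)
            .flatMap(Collection::stream)
            .iterator();
    }
}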
@@ -50,6 +50,7 @@
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@@ -399,7 +400,7 @@ static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbFi
}

void processQuery(Query query, ParseContext context) {
-ParseContext.Document doc = context.doc();
+LuceneDocument doc = context.doc();
PercolatorFieldType pft = (PercolatorFieldType) this.fieldType();
QueryAnalyzer.Result result;
Version indexVersion = context.indexSettings().getIndexVersionCreated();
@@ -78,6 +78,7 @@
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -230,7 +231,7 @@ public void testDuel() throws Exception {
return new FunctionScoreQuery(innerQuery, minScore, 1f);
});

-List<ParseContext.Document> documents = new ArrayList<>();
+List<LuceneDocument> documents = new ArrayList<>();
for (Supplier<Query> queryFunction : queryFunctions) {
Query query = queryFunction.get();
addQuery(query, documents);
@@ -326,7 +327,7 @@ public void testDuel2() throws Exception {
ranges.add(new int[]{15, 50});

SearchExecutionContext context = createSearchContext(indexService).getSearchExecutionContext();
-List<ParseContext.Document> documents = new ArrayList<>();
+List<LuceneDocument> documents = new ArrayList<>();
{
addQuery(new TermQuery(new Term("string_field", randomFrom(stringValues))), documents);
}
@@ -490,7 +491,7 @@ public void testDuelIdBased() throws Exception {
queryFunctions.add((id) -> new MatchNoDocsQuery("no reason at all"));

int numDocs = randomIntBetween(queryFunctions.size(), queryFunctions.size() * 3);
-List<ParseContext.Document> documents = new ArrayList<>();
+List<LuceneDocument> documents = new ArrayList<>();
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
Query query = queryFunctions.get(i % queryFunctions.size()).apply(id);
@@ -520,7 +521,7 @@ public void testDuelIdBased() throws Exception {
}

public void testDuelSpecificQueries() throws Exception {
-List<ParseContext.Document> documents = new ArrayList<>();
+List<LuceneDocument> documents = new ArrayList<>();

CommonTermsQuery commonTermsQuery = new CommonTermsQuery(Occur.SHOULD, Occur.SHOULD, 128);
commonTermsQuery.add(new Term("field", "quick"));
@@ -573,7 +574,7 @@ public void testDuelSpecificQueries() throws Exception {
}

public void testRangeQueries() throws Exception {
-List<ParseContext.Document> docs = new ArrayList<>();
+List<LuceneDocument> docs = new ArrayList<>();
addQuery(IntPoint.newRangeQuery("int_field", 0, 5), docs);
addQuery(LongPoint.newRangeQuery("long_field", 5L, 10L), docs);
addQuery(HalfFloatPoint.newRangeQuery("half_float_field", 10, 15), docs);
@@ -640,7 +641,7 @@ public void testRangeQueries() throws Exception {
}

public void testDuelRangeQueries() throws Exception {
-List<ParseContext.Document> documents = new ArrayList<>();
+List<LuceneDocument> documents = new ArrayList<>();

int lowerInt = randomIntBetween(0, 256);
int upperInt = lowerInt + randomIntBetween(0, 32);
@@ -743,7 +744,7 @@ public void testDuelRangeQueries() throws Exception {
}

public void testPercolateMatchAll() throws Exception {
-List<ParseContext.Document> docs = new ArrayList<>();
+List<LuceneDocument> docs = new ArrayList<>();
addQuery(new MatchAllDocsQuery(), docs);
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
@@ -789,7 +790,7 @@ public void testPercolateMatchAll() throws Exception {
}

public void testFunctionScoreQuery() throws Exception {
-List<ParseContext.Document> docs = new ArrayList<>();
+List<LuceneDocument> docs = new ArrayList<>();
addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), null, 1f), docs);
addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), 10f, 1f), docs);
addQuery(new FunctionScoreQuery(new MatchAllDocsQuery(), null, 1f), docs);
@@ -814,7 +815,7 @@ public void testFunctionScoreQuery() throws Exception {
}

public void testPercolateSmallAndLargeDocument() throws Exception {
-List<ParseContext.Document> docs = new ArrayList<>();
+List<LuceneDocument> docs = new ArrayList<>();
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
builder.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
@@ -907,7 +908,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception {
}

public void testDuplicatedClauses() throws Exception {
-List<ParseContext.Document> docs = new ArrayList<>();
+List<LuceneDocument> docs = new ArrayList<>();

BooleanQuery.Builder builder = new BooleanQuery.Builder();
BooleanQuery.Builder builder1 = new BooleanQuery.Builder();
@@ -956,7 +957,7 @@ public void testDuplicatedClauses() throws Exception {
}

public void testDuplicatedClauses2() throws Exception {
-List<ParseContext.Document> docs = new ArrayList<>();
+List<LuceneDocument> docs = new ArrayList<>();

BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(3);
@@ -1007,7 +1008,7 @@ public void testDuplicatedClauses2() throws Exception {
public void testMsmAndRanges_disjunction() throws Exception {
// Recreates a similar scenario that made testDuel() fail randomly:
// https://github.com/elastic/elasticsearch/issues/29393
-List<ParseContext.Document> docs = new ArrayList<>();
+List<LuceneDocument> docs = new ArrayList<>();
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setMinimumNumberShouldMatch(2);

@@ -1107,11 +1108,11 @@ private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryInd
}
}

-private void addQuery(Query query, List<ParseContext.Document> docs) {
+private void addQuery(Query query, List<LuceneDocument> docs) {
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(
documentMapper.mappers(), indexService.getIndexSettings(), indexService.getIndexAnalyzers(), null, null, null);
fieldMapper.processQuery(query, parseContext);
-ParseContext.Document queryDocument = parseContext.doc();
+LuceneDocument queryDocument = parseContext.doc();
// Add to string representation of the query to make debugging easier:
queryDocument.add(new StoredField("query_to_string", query.toString()));
docs.add(queryDocument);
@@ -48,6 +48,7 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
@@ -173,7 +174,7 @@ public void testExtractTerms() throws Exception {
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
mapperService.getIndexSettings(), null, null, null, null);
fieldMapper.processQuery(bq.build(), parseContext);
-ParseContext.Document document = parseContext.doc();
+LuceneDocument document = parseContext.doc();

PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
@@ -225,7 +226,7 @@ public void testExtractRanges() throws Exception {
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
mapperService.getIndexSettings(), null, null, null, null);
fieldMapper.processQuery(bq.build(), parseContext);
-ParseContext.Document document = parseContext.doc();
+LuceneDocument document = parseContext.doc();

PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
@@ -275,7 +276,7 @@ public void testExtractTermsAndRanges_failed() throws Exception {
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
mapperService.getIndexSettings(), null, null, null, null);
fieldMapper.processQuery(query, parseContext);
-ParseContext.Document document = parseContext.doc();
+LuceneDocument document = parseContext.doc();

PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
assertThat(document.getFields().size(), equalTo(1));
@@ -290,7 +291,7 @@ public void testExtractTermsAndRanges_partial() throws Exception {
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
mapperService.getIndexSettings(), null, null, null, null);
fieldMapper.processQuery(phraseQuery, parseContext);
-ParseContext.Document document = parseContext.doc();
+LuceneDocument document = parseContext.doc();

PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
assertThat(document.getFields().size(), equalTo(3));
@@ -27,6 +27,7 @@
import org.elasticsearch.index.mapper.BinaryFieldMapper;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.query.TermQueryBuilder;
@@ -68,7 +69,7 @@ public void testStoringQueryBuilders() throws IOException {
for (int i = 0; i < queryBuilders.length; i++) {
queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8));
ParseContext parseContext = mock(ParseContext.class);
-ParseContext.Document document = new ParseContext.Document();
+LuceneDocument document = new LuceneDocument();
when(parseContext.doc()).thenReturn(document);
PercolatorFieldMapper.createQueryBuilderField(version,
fieldMapper, queryBuilders[i], parseContext);
@@ -54,7 +54,7 @@
import org.elasticsearch.index.mapper.DocumentParser;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
-import org.elasticsearch.index.mapper.ParseContext.Document;
+import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.seqno.SeqNoStats;
@@ -1391,7 +1391,7 @@ public String routing() {
return this.doc.routing();
}

-public List<Document> docs() {
+public List<LuceneDocument> docs() {
return this.doc.docs();
}

@@ -65,9 +65,9 @@
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.DocumentParser;
import org.elasticsearch.index.mapper.IdFieldMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
-import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
@@ -1183,7 +1183,7 @@ private boolean mayHaveBeenIndexedBefore(Index index) {
return mayHaveBeenIndexBefore;
}

-private void addDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
+private void addDocs(final List<LuceneDocument> docs, final IndexWriter indexWriter) throws IOException {
if (docs.size() > 1) {
indexWriter.addDocuments(docs);
} else {
@@ -1192,9 +1192,9 @@ private void addDocs(final List<ParseContext.Document> docs, final IndexWriter i
numDocAppends.inc(docs.size());
}

-private void addStaleDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
+private void addStaleDocs(final List<LuceneDocument> docs, final IndexWriter indexWriter) throws IOException {
assert softDeleteEnabled : "Add history documents but soft-deletes is disabled";
-for (ParseContext.Document doc : docs) {
+for (LuceneDocument doc : docs) {
doc.add(softDeletesField); // soft-deleted every document before adding to Lucene
}
if (docs.size() > 1) {
@@ -1286,7 +1286,7 @@ private boolean assertDocDoesNotExist(final Index index, final boolean allowDele
return true;
}

-private void updateDocs(final Term uid, final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
+private void updateDocs(final Term uid, final List<LuceneDocument> docs, final IndexWriter indexWriter) throws IOException {
if (softDeleteEnabled) {
if (docs.size() > 1) {
indexWriter.softUpdateDocuments(uid, docs, softDeletesField);
@@ -1474,7 +1474,7 @@ private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws
assert tombstone.docs().size() == 1 : "Tombstone doc should have single doc [" + tombstone + "]";
tombstone.updateSeqID(delete.seqNo(), delete.primaryTerm());
tombstone.version().setLongValue(plan.versionOfDeletion);
-final ParseContext.Document doc = tombstone.docs().get(0);
+final LuceneDocument doc = tombstone.docs().get(0);
assert doc.getField(SeqNoFieldMapper.TOMBSTONE_NAME) != null :
"Delete tombstone document but _tombstone field is not set [" + doc + " ]";
doc.add(softDeletesField);
@@ -1614,7 +1614,7 @@ private NoOpResult innerNoOp(final NoOp noOp) throws IOException {
// version field.
tombstone.version().setLongValue(1L);
assert tombstone.docs().size() == 1 : "Tombstone should have a single doc [" + tombstone + "]";
-final ParseContext.Document doc = tombstone.docs().get(0);
+final LuceneDocument doc = tombstone.docs().get(0);
assert doc.getField(SeqNoFieldMapper.TOMBSTONE_NAME) != null
: "Noop tombstone document but _tombstone field is not set [" + doc + " ]";
doc.add(softDeletesField);
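
The tombstone handling above leans on Lucene soft deletes: rather than physically removing a document, a doc-values field marks it deleted so that history stays readable for replication and recovery. A hedged sketch of the mechanism using plain Lucene types (the class name and the field name "__soft_deletes" are assumptions; Elasticsearch keeps the real field name in a constant):

import java.io.IOException;
import java.util.List;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;

class SoftDeleteSketch {
    // Presence of this doc-values field marks a document as soft-deleted.
    private final Field softDeletesField = new NumericDocValuesField("__soft_deletes", 1);

    // Updating under soft deletes: the previous version of the document stays
    // in the index, flagged via the doc-values field rather than removed.
    void updateDocs(IndexWriter writer, Term uid, List<Document> docs) throws IOException {
        if (docs.size() > 1) {
            writer.softUpdateDocuments(uid, docs, softDeletesField);
        } else {
            writer.softUpdateDocument(uid, docs.get(0), softDeletesField);
        }
    }
}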
@@ -328,7 +328,7 @@ int getMaxInputLength() {
* "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT }
*
* Indexing:
-* if context mappings are defined, delegates to {@link ContextMappings#addField(ParseContext.Document, String, String, int, Map)}
+* if context mappings are defined, delegates to {@link ContextMappings#addField(LuceneDocument, String, String, int, Map)}
* else adds inputs as a {@link org.apache.lucene.search.suggest.document.SuggestField}
*/
@Override
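
The javadoc above describes the only behavioral fork in suggestion indexing. A hedged sketch of that branch (the class and helper names and the Map generics are assumptions; ContextMappings#addField and Lucene's SuggestField come from the source):

import java.util.Map;
import java.util.Set;

import org.apache.lucene.search.suggest.document.SuggestField;
import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;

class SuggestIndexingSketch {
    // With context mappings defined, delegate so the input is indexed once per
    // context; otherwise add the input as a plain Lucene SuggestField.
    void index(LuceneDocument doc, String fieldName, String input, int weight,
               Map<String, Set<String>> contexts, ContextMappings contextMappings) {
        if (contextMappings != null) {
            contextMappings.addField(doc, fieldName, input, weight, contexts);
        } else {
            doc.add(new SuggestField(fieldName, input, weight));
        }
    }
}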