From 0539997b3785c2600eef9548ee9acefcd418f966 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Wed, 25 Mar 2015 10:42:49 +0100
Subject: [PATCH] parent/child: Move over to Lucene's join util and doc values by default

This is a breaking change:
1) A parent type needs to be marked as parent in the _parent field mapping of the
   parent type, e.g. "_parent" : { "parent" : true } (child types keep referring to
   their parent type via "_parent" : { "type" : ... }).
2) The top_children query will be removed. It was meant as a faster alternative to
   has_child, but it isn't accurate and wasn't always faster.

Indices created before 2.0 will keep using field data and the old way of executing
these queries; indices created on or after 2.0 will use the Lucene join utility and
doc values.

Closes #6107
Closes #6511
Closes #8134
---
 .../lucene/join/GlobalOrdinalsCollector.java  |  116 +
 .../lucene/join/GlobalOrdinalsQuery.java      |  346 +++
 .../GlobalOrdinalsWithScoreCollector.java     |  241 ++
 .../join/GlobalOrdinalsWithScoreQuery.java    |  352 +++
 .../java/org/apache/lucene/join/JoinUtil.java |  117 +
 .../type/TransportSearchTypeAction.java       |    1 +
 .../index/AbstractIndexComponent.java         |    4 +
 .../plain/ParentChildIndexFieldData.java      |   65 +-
 .../mapper/internal/ParentFieldMapper.java    |   67 +-
 .../index/query/HasChildFilterParser.java     |   16 +-
 .../index/query/HasChildQueryParser.java      |   68 +-
 .../index/query/HasParentQueryParser.java     |   16 +-
 .../index/query/QueryParseContext.java        |   15 +-
 .../index/query/TopChildrenQueryBuilder.java  |    1 +
 .../index/query/TopChildrenQueryParser.java   |    7 +
 .../index/search/child/TopChildrenQuery.java  |    1 +
 .../search/query/QueryPhase.java              |    1 +
 .../child/SimpleChildQuerySearchBwcTests.java | 2720 +++++++++++++++++
 .../child/SimpleChildQuerySearchTests.java    |  362 +--
 19 files changed, 4155 insertions(+), 361 deletions(-)
 create mode 100644 src/main/java/org/apache/lucene/join/GlobalOrdinalsCollector.java
 create mode 100644 src/main/java/org/apache/lucene/join/GlobalOrdinalsQuery.java
 create mode 100644 src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreCollector.java
 create mode 100644 src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreQuery.java
 create mode 100644 src/main/java/org/apache/lucene/join/JoinUtil.java
 create mode 100644 src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchBwcTests.java

diff --git a/src/main/java/org/apache/lucene/join/GlobalOrdinalsCollector.java b/src/main/java/org/apache/lucene/join/GlobalOrdinalsCollector.java
new file mode 100644
index 0000000000000..a3396f245ba4e
--- /dev/null
+++ b/src/main/java/org/apache/lucene/join/GlobalOrdinalsCollector.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.apache.lucene.join; + +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.util.LongBitSet; +import org.apache.lucene.util.LongValues; + +import java.io.IOException; + +/** + * A collector that collects all ordinals from a specified field matching the query. + * + * @lucene.experimental + */ +final class GlobalOrdinalsCollector implements Collector { + + final String field; + final LongBitSet collectedOrds; + final MultiDocValues.OrdinalMap ordinalMap; + + GlobalOrdinalsCollector(String field, MultiDocValues.OrdinalMap ordinalMap, long valueCount) { + this.field = field; + this.ordinalMap = ordinalMap; + this.collectedOrds = new LongBitSet(valueCount); + } + + public LongBitSet getCollectorOrdinals() { + return collectedOrds; + } + + @Override + public boolean needsScores() { + return false; + } + + @Override + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + SortedDocValues docTermOrds = DocValues.getSorted(context.reader(), field); + if (ordinalMap != null) { + LongValues segmentOrdToGlobalOrdLookup = ordinalMap.getGlobalOrds(context.ord); + return new OrdinalMapCollector(docTermOrds, segmentOrdToGlobalOrdLookup); + } else { + return new SegmentOrdinalCollector(docTermOrds); + } + } + + final class OrdinalMapCollector implements LeafCollector { + + private final SortedDocValues docTermOrds; + private final LongValues segmentOrdToGlobalOrdLookup; + + OrdinalMapCollector(SortedDocValues docTermOrds, LongValues segmentOrdToGlobalOrdLookup) { + this.docTermOrds = docTermOrds; + this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup; + } + + @Override + public void collect(int doc) throws IOException { + final long segmentOrd = docTermOrds.getOrd(doc); + if (segmentOrd != -1) { + final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd); + collectedOrds.set(globalOrd); + } + } + + @Override + public void setScorer(Scorer scorer) throws IOException { + } + } + + final class SegmentOrdinalCollector implements LeafCollector { + + private final SortedDocValues docTermOrds; + + SegmentOrdinalCollector(SortedDocValues docTermOrds) { + this.docTermOrds = docTermOrds; + } + + @Override + public void collect(int doc) throws IOException { + final long segmentOrd = docTermOrds.getOrd(doc); + if (segmentOrd != -1) { + collectedOrds.set(segmentOrd); + } + } + + @Override + public void setScorer(Scorer scorer) throws IOException { + } + } + +} diff --git a/src/main/java/org/apache/lucene/join/GlobalOrdinalsQuery.java b/src/main/java/org/apache/lucene/join/GlobalOrdinalsQuery.java new file mode 100644 index 0000000000000..20dd305972156 --- /dev/null +++ b/src/main/java/org/apache/lucene/join/GlobalOrdinalsQuery.java @@ -0,0 +1,346 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.lucene.join; + +import org.apache.lucene.index.*; +import org.apache.lucene.search.*; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LongBitSet; +import org.apache.lucene.util.LongValues; + +import java.io.IOException; +import java.util.Set; + +/** + */ +final class GlobalOrdinalsQuery extends Query { + + // All the ords of matching docs found with OrdinalsCollector. + private final LongBitSet foundOrds; + private final String joinField; + private final MultiDocValues.OrdinalMap globalOrds; + // Is also an approximation of the docs that will match. Can be all docs that have toField or something more specific. + private final Query toQuery; + // just hashcode and equals + private final Query fromQuery; + + GlobalOrdinalsQuery(LongBitSet foundOrds, String joinField, MultiDocValues.OrdinalMap globalOrds, Query toQuery, Query fromQuery) { + this.foundOrds = foundOrds; + this.joinField = joinField; + this.globalOrds = globalOrds; + this.toQuery = toQuery; + this.fromQuery = fromQuery; + } + + @Override + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new W(this, toQuery.createWeight(searcher, false)); + } + + @Override + public void extractTerms(Set terms) { + fromQuery.extractTerms(terms); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + + GlobalOrdinalsQuery that = (GlobalOrdinalsQuery) o; + + if (!fromQuery.equals(that.fromQuery)) return false; + if (!joinField.equals(that.joinField)) return false; + if (!toQuery.equals(that.toQuery)) return false; + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + joinField.hashCode(); + result = 31 * result + toQuery.hashCode(); + result = 31 * result + fromQuery.hashCode(); + return result; + } + + @Override + public String toString(String field) { + return "GlobalOrdinalsQuery{" + + "joinField=" + joinField + + '}'; + } + + final class W extends Weight { + + private final Weight approximationWeight; + + private float queryNorm; + private float queryWeight; + + W(Query query, Weight approximationWeight) { + super(query); + this.approximationWeight = approximationWeight; + } + + @Override + public Explanation explain(LeafReaderContext context, int doc) throws IOException { + SortedDocValues values = DocValues.getSorted(context.reader(), joinField); + if (values != null) { + int segmentOrd = values.getOrd(doc); + if (segmentOrd != -1) { + BytesRef joinValue = values.lookupOrd(segmentOrd); + return new ComplexExplanation(true, queryNorm, "Score based on join value " + joinValue.utf8ToString()); + } + } + return new ComplexExplanation(false, 0.0f, "Not a match"); + } + + @Override + public float getValueForNormalization() throws IOException { + queryWeight = getBoost(); + return queryWeight * queryWeight; + } + + @Override + public void normalize(float norm, float topLevelBoost) { + this.queryNorm 
= norm * topLevelBoost; + queryWeight *= this.queryNorm; + } + + @Override + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { + SortedDocValues values = DocValues.getSorted(context.reader(), joinField); + if (values == null) { + return null; + } + + Scorer approximationScorer = approximationWeight.scorer(context, acceptDocs); + if (approximationScorer == null) { + return null; + } + if (globalOrds != null) { + return new OrdinalMapScorer(this, values, globalOrds.getGlobalOrds(context.ord), approximationScorer, queryNorm); + } { + return new SegmentOrdinalScorer(this, values, approximationScorer, queryNorm); + } + } + + final class OrdinalMapScorer extends Scorer { + + final SortedDocValues values; + final LongValues segmentOrdToGlobalOrdLookup; + final Scorer approximationScorer; + final float score; + + int currentDocID = -1; + + public OrdinalMapScorer(Weight weight, SortedDocValues values, LongValues segmentOrdToGlobalOrdLookup, Scorer approximationScorer, float score) { + super(weight); + this.values = values; + this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup; + this.approximationScorer = approximationScorer; + this.score = score; + } + + @Override + public float score() throws IOException { + return score; + } + + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return advance(currentDocID + 1); + } + + @Override + public int advance(int target) throws IOException { + for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) { + final long segmentOrd = values.getOrd(docID); + if (segmentOrd != -1) { + final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd); + if (foundOrds.get(globalOrd)) { + return currentDocID = docID; + } + } + } + return currentDocID = NO_MORE_DOCS; + } + + @Override + public TwoPhaseIterator asTwoPhaseIterator() { + return new TwoPhaseIterator() { + @Override + public DocIdSetIterator approximation() { + return new DocIdSetIterator() { + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return currentDocID = approximationScorer.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return currentDocID = approximationScorer.advance(target); + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + }; + } + + @Override + public boolean matches() throws IOException { + final long segmentOrd = values.getOrd(currentDocID); + if (segmentOrd != -1) { + final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd); + if (foundOrds.get(globalOrd)) { + return true; + } + } + return false; + } + }; + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + + @Override + public int freq() throws IOException { + return 1; + } + + } + + final class SegmentOrdinalScorer extends Scorer { + + final SortedDocValues values; + final Scorer approximationScorer; + final float score; + + int currentDocID = -1; + + public SegmentOrdinalScorer(Weight weight, SortedDocValues values, Scorer approximationScorer, float score) { + super(weight); + this.values = values; + this.approximationScorer = approximationScorer; + this.score = score; + } + + @Override + public float score() throws IOException { + return score; + } + + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return 
advance(currentDocID + 1); + } + + @Override + public int advance(int target) throws IOException { + for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) { + final long segmentOrd = values.getOrd(docID); + if (segmentOrd != -1) { + if (foundOrds.get(segmentOrd)) { + return currentDocID = docID; + } + } + } + return currentDocID = NO_MORE_DOCS; + } + + @Override + public TwoPhaseIterator asTwoPhaseIterator() { + return new TwoPhaseIterator() { + @Override + public DocIdSetIterator approximation() { + return new DocIdSetIterator() { + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return currentDocID = approximationScorer.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return currentDocID = approximationScorer.advance(target); + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + }; + } + + @Override + public boolean matches() throws IOException { + final long segmentOrd = values.getOrd(currentDocID); + if (segmentOrd != -1) { + if (foundOrds.get(segmentOrd)) { + return true; + } + } + return false; + } + }; + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + + @Override + public int freq() throws IOException { + return 1; + } + + } + + } +} diff --git a/src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreCollector.java b/src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreCollector.java new file mode 100644 index 0000000000000..476c0d5569233 --- /dev/null +++ b/src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreCollector.java @@ -0,0 +1,241 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.lucene.join; + +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.util.LongBitSet; +import org.apache.lucene.util.LongValues; + +import java.io.IOException; + +abstract class GlobalOrdinalsWithScoreCollector implements Collector { + + final String field; + final MultiDocValues.OrdinalMap ordinalMap; + final LongBitSet collectedOrds; + protected final Scores scores; + + GlobalOrdinalsWithScoreCollector(String field, MultiDocValues.OrdinalMap ordinalMap, long valueCount) { + if (valueCount > Integer.MAX_VALUE) { + // We simply don't support more than + throw new IllegalStateException("Can't collect more than [" + Integer.MAX_VALUE + "] ids"); + } + this.field = field; + this.ordinalMap = ordinalMap; + this.collectedOrds = new LongBitSet(valueCount); + this.scores = new Scores(); + } + + public LongBitSet getCollectorOrdinals() { + return collectedOrds; + } + + public float score(int globalOrdinal) { + return scores.getScore(globalOrdinal); + } + + protected abstract void doScore(int globalOrd, float existingScore, float newScore); + + @Override + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + SortedDocValues docTermOrds = DocValues.getSorted(context.reader(), field); + if (ordinalMap != null) { + LongValues segmentOrdToGlobalOrdLookup = ordinalMap.getGlobalOrds(context.ord); + return new OrdinalMapCollector(docTermOrds, segmentOrdToGlobalOrdLookup); + } else { + return new SegmentOrdinalCollector(docTermOrds); + } + } + + @Override + public boolean needsScores() { + return true; + } + + final class OrdinalMapCollector implements LeafCollector { + + private final SortedDocValues docTermOrds; + private final LongValues segmentOrdToGlobalOrdLookup; + private Scorer scorer; + + OrdinalMapCollector(SortedDocValues docTermOrds, LongValues segmentOrdToGlobalOrdLookup) { + this.docTermOrds = docTermOrds; + this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup; + } + + @Override + public void collect(int doc) throws IOException { + final long segmentOrd = docTermOrds.getOrd(doc); + if (segmentOrd != -1) { + final int globalOrd = (int) segmentOrdToGlobalOrdLookup.get(segmentOrd); + collectedOrds.set(globalOrd); + float existingScore = scores.getScore(globalOrd); + float newScore = scorer.score(); + doScore(globalOrd, existingScore, newScore); + } + } + + @Override + public void setScorer(Scorer scorer) throws IOException { + this.scorer = scorer; + } + } + + final class SegmentOrdinalCollector implements LeafCollector { + + private final SortedDocValues docTermOrds; + private Scorer scorer; + + SegmentOrdinalCollector(SortedDocValues docTermOrds) { + this.docTermOrds = docTermOrds; + } + + @Override + public void collect(int doc) throws IOException { + final int segmentOrd = docTermOrds.getOrd(doc); + if (segmentOrd != -1) { + collectedOrds.set(segmentOrd); + float existingScore = scores.getScore(segmentOrd); + float newScore = scorer.score(); + doScore(segmentOrd, existingScore, newScore); + } + } + + @Override + public void setScorer(Scorer scorer) throws IOException { + this.scorer = scorer; + } + } + + static final class Max extends GlobalOrdinalsWithScoreCollector { + + public Max(String field, MultiDocValues.OrdinalMap 
ordinalMap, long valueCount) { + super(field, ordinalMap, valueCount); + } + + @Override + protected void doScore(int globalOrd, float existingScore, float newScore) { + scores.setScore(globalOrd, Math.max(existingScore, newScore)); + } + + } + + static final class Sum extends GlobalOrdinalsWithScoreCollector { + + public Sum(String field, MultiDocValues.OrdinalMap ordinalMap, long valueCount) { + super(field, ordinalMap, valueCount); + } + + @Override + protected void doScore(int globalOrd, float existingScore, float newScore) { + scores.setScore(globalOrd, existingScore + newScore); + } + + } + + static final class Avg extends GlobalOrdinalsWithScoreCollector { + + private final Occurrences occurrences = new Occurrences(); + + public Avg(String field, MultiDocValues.OrdinalMap ordinalMap, long valueCount) { + super(field, ordinalMap, valueCount); + } + + @Override + protected void doScore(int globalOrd, float existingScore, float newScore) { + occurrences.increment(globalOrd); + scores.setScore(globalOrd, existingScore + newScore); + } + + @Override + public float score(int globalOrdinal) { + return scores.getScore(globalOrdinal) / occurrences.getOccurence(globalOrdinal); + } + } + + // Because the global ordinal is directly used as a key to a score we should be somewhat smart about allocation + // the scores array. Most of the times not all docs match so splitting the scores array up in blocks can prevent creation of huge arrays. + // Also working with smaller arrays is supposed to be more gc friendly + // + // At first a hash map implementation would make sense, but in the case that more than half of docs match this becomes more expensive + // then just using an array. + + // Maybe this should become a method parameter? + static final int arraySize = 4096; + + static final class Scores { + + final float[][] blocks = new float[Integer.MAX_VALUE / arraySize][]; + + public void setScore(int globalOrdinal, float score) { + int block = globalOrdinal / arraySize; + int offset = globalOrdinal % arraySize; + float[] scores = blocks[block]; + if (scores == null) { + blocks[block] = scores = new float[arraySize]; + } + scores[offset] = score; + } + + public float getScore(int globalOrdinal) { + int block = globalOrdinal / arraySize; + int offset = globalOrdinal % arraySize; + float[] scores = blocks[block]; + float score; + if (scores != null) { + score = scores[offset]; + } else { + score = 0f; + } + return score; + } + + } + + static final class Occurrences { + + final int[][] blocks = new int[Integer.MAX_VALUE / arraySize][]; + + public void increment(int globalOrdinal) { + int block = globalOrdinal / arraySize; + int offset = globalOrdinal % arraySize; + int[] occurrences = blocks[block]; + if (occurrences == null) { + blocks[block] = occurrences = new int[arraySize]; + } + occurrences[offset]++; + } + + public int getOccurence(int globalOrdinal) { + int block = globalOrdinal / arraySize; + int offset = globalOrdinal % arraySize; + int[] occurrences = blocks[block]; + return occurrences[offset]; + } + + } + +} diff --git a/src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreQuery.java b/src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreQuery.java new file mode 100644 index 0000000000000..518173bbf4d2f --- /dev/null +++ b/src/main/java/org/apache/lucene/join/GlobalOrdinalsWithScoreQuery.java @@ -0,0 +1,352 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.lucene.join; + +import org.apache.lucene.index.*; +import org.apache.lucene.search.*; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LongBitSet; +import org.apache.lucene.util.LongValues; + +import java.io.IOException; +import java.util.Set; + +final class GlobalOrdinalsWithScoreQuery extends Query { + + private final GlobalOrdinalsWithScoreCollector collector; + private final String joinField; + private final MultiDocValues.OrdinalMap globalOrds; + // Is also an approximation of the docs that will match. Can be all docs that have toField or something more specific. + private final Query toQuery; + // just hashcode and equals + private final Query fromQuery; + + GlobalOrdinalsWithScoreQuery(GlobalOrdinalsWithScoreCollector collector, String joinField, MultiDocValues.OrdinalMap globalOrds, Query toQuery, Query fromQuery) { + this.collector = collector; + this.joinField = joinField; + this.globalOrds = globalOrds; + this.toQuery = toQuery; + this.fromQuery = fromQuery; + } + + @Override + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new W(this, toQuery.createWeight(searcher, false)); + } + + @Override + public void extractTerms(Set terms) { + fromQuery.extractTerms(terms); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + + GlobalOrdinalsWithScoreQuery that = (GlobalOrdinalsWithScoreQuery) o; + + if (!fromQuery.equals(that.fromQuery)) return false; + if (!joinField.equals(that.joinField)) return false; + if (!toQuery.equals(that.toQuery)) return false; + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + joinField.hashCode(); + result = 31 * result + toQuery.hashCode(); + result = 31 * result + fromQuery.hashCode(); + return result; + } + + @Override + public String toString(String field) { + return "GlobalOrdinalsQuery{" + + "joinField=" + joinField + + '}'; + } + + final class W extends Weight { + + private final Weight approximationWeight; + private final LongBitSet foundOrds; + + private float queryNorm; + private float queryWeight; + + W(Query query, Weight approximationWeight) { + super(query); + this.approximationWeight = approximationWeight; + this.foundOrds = collector.getCollectorOrdinals(); + } + + @Override + public Explanation explain(LeafReaderContext context, int doc) throws IOException { + SortedDocValues values = DocValues.getSorted(context.reader(), joinField); + if (values != null) { + int segmentOrd = values.getOrd(doc); + if (segmentOrd != -1) { + final float score; + if (globalOrds != null) { + long globalOrd = 
globalOrds.getGlobalOrds(context.ord).get(segmentOrd); + score = collector.scores.getScore((int) globalOrd); + } else { + score = collector.score(segmentOrd); + } + BytesRef joinValue = values.lookupOrd(segmentOrd); + return new ComplexExplanation(true, score, "Score based on join value " + joinValue.utf8ToString()); + } + } + return new ComplexExplanation(false, 0.0f, "Not a match"); + } + + @Override + public float getValueForNormalization() throws IOException { + queryWeight = getBoost(); + return queryWeight * queryWeight; + } + + @Override + public void normalize(float norm, float topLevelBoost) { + this.queryNorm = norm * topLevelBoost; + queryWeight *= this.queryNorm; + } + + @Override + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { + SortedDocValues values = DocValues.getSorted(context.reader(), joinField); + if (values == null) { + return null; + } + + Scorer approximationScorer = approximationWeight.scorer(context, acceptDocs); + if (approximationScorer == null) { + return null; + } + if (globalOrds != null) { + return new OrdinalMapScorer(this, values, globalOrds.getGlobalOrds(context.ord), approximationScorer); + } else { + return new SegmentOrdinalScorer(this, values, approximationScorer); + } + } + + final class OrdinalMapScorer extends Scorer { + + final SortedDocValues values; + final LongValues segmentOrdToGlobalOrdLookup; + final Scorer approximationScorer; + + float score; + int currentDocID = -1; + + public OrdinalMapScorer(Weight weight, SortedDocValues values, LongValues segmentOrdToGlobalOrdLookup, Scorer approximationScorer) { + super(weight); + this.values = values; + this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup; + this.approximationScorer = approximationScorer; + } + + @Override + public float score() throws IOException { + return score; + } + + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return advance(currentDocID + 1); + } + + @Override + public int advance(int target) throws IOException { + for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) { + final long segmentOrd = values.getOrd(docID); + if (segmentOrd != -1) { + final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd); + if (foundOrds.get(globalOrd)) { + score = collector.score((int) globalOrd); + return currentDocID = docID; + } + } + } + return currentDocID = NO_MORE_DOCS; + } + + @Override + public TwoPhaseIterator asTwoPhaseIterator() { + return new TwoPhaseIterator() { + @Override + public DocIdSetIterator approximation() { + return new DocIdSetIterator() { + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return currentDocID = approximationScorer.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return currentDocID = approximationScorer.advance(target); + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + }; + } + + @Override + public boolean matches() throws IOException { + final long segmentOrd = values.getOrd(currentDocID); + if (segmentOrd != -1) { + final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd); + if (foundOrds.get(globalOrd)) { + return true; + } + } + return false; + } + }; + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + + @Override + public int freq() throws IOException { + return 1; + } + + } + 
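+        // Used when the index has a single segment and no OrdinalMap was built: the segment ordinals are then
+        // also the global ordinals, so collected ordinals and scores are looked up by segment ordinal directly.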
+ final class SegmentOrdinalScorer extends Scorer { + + final SortedDocValues values; + final Scorer approximationScorer; + + float score; + int currentDocID = -1; + + public SegmentOrdinalScorer(Weight weight, SortedDocValues values, Scorer approximationScorer) { + super(weight); + this.values = values; + this.approximationScorer = approximationScorer; + } + + @Override + public float score() throws IOException { + return score; + } + + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return advance(currentDocID + 1); + } + + @Override + public int advance(int target) throws IOException { + for (int docID = approximationScorer.advance(target); docID < NO_MORE_DOCS; docID = approximationScorer.nextDoc()) { + final int segmentOrd = values.getOrd(docID); + if (segmentOrd != -1) { + if (foundOrds.get(segmentOrd)) { + score = collector.score(segmentOrd); + return currentDocID = docID; + } + } + } + return currentDocID = NO_MORE_DOCS; + } + + @Override + public TwoPhaseIterator asTwoPhaseIterator() { + return new TwoPhaseIterator() { + @Override + public DocIdSetIterator approximation() { + return new DocIdSetIterator() { + @Override + public int docID() { + return currentDocID; + } + + @Override + public int nextDoc() throws IOException { + return currentDocID = approximationScorer.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return currentDocID = approximationScorer.advance(target); + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + }; + } + + @Override + public boolean matches() throws IOException { + final int segmentOrd = values.getOrd(currentDocID); + if (segmentOrd != -1) { + if (foundOrds.get(segmentOrd)) { + return true; + } + } + return false; + } + }; + } + + @Override + public long cost() { + return approximationScorer.cost(); + } + + @Override + public int freq() throws IOException { + return 1; + } + + } + + } +} diff --git a/src/main/java/org/apache/lucene/join/JoinUtil.java b/src/main/java/org/apache/lucene/join/JoinUtil.java new file mode 100644 index 0000000000000..e60800f5eff5f --- /dev/null +++ b/src/main/java/org/apache/lucene/join/JoinUtil.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.apache.lucene.join;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
+
+import java.io.IOException;
+import java.util.Locale;
+
+/**
+ * Utility for query time joining using global ordinals over a dedicated join field.
+ *
+ * @lucene.experimental
+ */
+public final class JoinUtil {
+
+    // No instances allowed
+    private JoinUtil() {
+    }
+
+    /**
+     * A query time join using global ordinals over a dedicated join field.
+     *
+     * This join has certain restrictions and requirements:
+     * 1) A document can only refer to one other document. (but can be referred to by one or more documents)
+     * 2) Documents on each side of the join must be distinguishable. Typically this can be done by adding an extra field
+     *    that identifies the "from" and "to" side, and then the fromQuery and toQuery must take this into account.
+     * 3) There must be a single sorted doc values join field used by both the "from" and "to" documents.
+     * 4) An ordinal map must be provided that is created on top of the join field.
+     *
+     * @param joinField  The {@link org.apache.lucene.index.SortedDocValues} field containing the join values
+     * @param fromQuery  The query containing the actual user query. The fromQuery must only match "from" documents.
+     * @param toQuery    The query identifying all documents on the "to" side.
+     * @param searcher   The index searcher used to execute the from query
+     * @param scoreMode  Instructs how scores from the fromQuery are mapped to the returned query
+     * @param ordinalMap The ordinal map constructed over the joinField
+     * @return a {@link org.apache.lucene.search.Query} instance that can be used to join documents based on the join field
+     * @throws java.io.IOException If I/O related errors occur
+     */
+    public static Query createJoinQuery(String joinField,
+                                        Query fromQuery,
+                                        Query toQuery,
+                                        IndexSearcher searcher,
+                                        ScoreMode scoreMode,
+                                        MultiDocValues.OrdinalMap ordinalMap) throws IOException {
+        IndexReader indexReader = searcher.getIndexReader();
+        int numSegments = indexReader.leaves().size();
+        final long valueCount;
+        if (numSegments == 0) {
+            return new MatchNoDocsQuery();
+        } else if (numSegments == 1) {
+            // No need to use the ordinal map, because there is just one segment.
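+            // The segment's ordinals then serve directly as global ordinals, so the collector and
+            // query fall back to their SegmentOrdinal* variants.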
+ ordinalMap = null; + LeafReader leafReader = searcher.getIndexReader().leaves().get(0).reader(); + SortedDocValues joinSortedDocValues = leafReader.getSortedDocValues(joinField); + if (joinSortedDocValues != null) { + valueCount = joinSortedDocValues.getValueCount(); + } else { + return new MatchNoDocsQuery(); + } + } else { + if (ordinalMap == null) { + throw new IllegalArgumentException("OrdinalMap is required, because there is more than 1 segment"); + } + valueCount = ordinalMap.getValueCount(); + } + + Query rewrittenFromQuery = fromQuery.rewrite(indexReader); + if (scoreMode == ScoreMode.None) { + GlobalOrdinalsCollector globalOrdinalsCollector = new GlobalOrdinalsCollector(joinField, ordinalMap, valueCount); + searcher.search(fromQuery, globalOrdinalsCollector); + return new GlobalOrdinalsQuery(globalOrdinalsCollector.getCollectorOrdinals(), joinField, ordinalMap, toQuery, rewrittenFromQuery); + } + + GlobalOrdinalsWithScoreCollector globalOrdinalsWithScoreCollector; + switch (scoreMode) { + case Total: + globalOrdinalsWithScoreCollector = new GlobalOrdinalsWithScoreCollector.Sum(joinField, ordinalMap, valueCount); + break; + case Max: + globalOrdinalsWithScoreCollector = new GlobalOrdinalsWithScoreCollector.Max(joinField, ordinalMap, valueCount); + break; + case Avg: + globalOrdinalsWithScoreCollector = new GlobalOrdinalsWithScoreCollector.Avg(joinField, ordinalMap, valueCount); + break; + default: + throw new IllegalArgumentException(String.format(Locale.ROOT, "Score mode %s isn't supported.", scoreMode)); + } + searcher.search(fromQuery, globalOrdinalsWithScoreCollector); + return new GlobalOrdinalsWithScoreQuery(globalOrdinalsWithScoreCollector, joinField, ordinalMap, toQuery, rewrittenFromQuery); + } + +} diff --git a/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java index fbc620a318cca..1f1a152749c14 100644 --- a/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java +++ b/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java @@ -203,6 +203,7 @@ void onFirstPhaseResult(final int shardIndex, @Nullable ShardRouting shard, @Nul // we do make sure to clean it on a successful response from a shard SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId().getIndex(), shardIt.shardId().getId()); addShardFailure(shardIndex, shardTarget, t); + t.printStackTrace(); if (totalOps.incrementAndGet() == expectedTotalOps) { if (logger.isDebugEnabled()) { diff --git a/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java b/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java index 6b2cd15a18d2b..c6f1fdf4d27c6 100644 --- a/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java +++ b/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java @@ -52,6 +52,10 @@ public Index index() { return this.index; } + public Settings indexSettings() { + return indexSettings; + } + public String nodeName() { return indexSettings.get("name", ""); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index 98cc061f05aa8..40687d00bb27b 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -21,16 +21,11 @@ import 
com.carrotsearch.hppc.ObjectObjectOpenHashMap; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedSet; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.*; import org.apache.lucene.index.MultiDocValues.OrdinalMap; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; @@ -40,6 +35,7 @@ import org.apache.lucene.util.packed.PackedLongValues; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchIllegalStateException; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -115,16 +111,54 @@ public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, Mu } @Override - public ParentChildAtomicFieldData loadDirect(LeafReaderContext context) throws Exception { + public AbstractAtomicParentChildFieldData loadDirect(LeafReaderContext context) throws Exception { LeafReader reader = context.reader(); final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat( "acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO ); + final NavigableSet parentTypes; synchronized (lock) { parentTypes = ImmutableSortedSet.copyOf(BytesRef.getUTF8SortedAsUnicodeComparator(), this.parentTypes); } + if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0)) { + final ImmutableMap.Builder builder = ImmutableMap.builder(); + for (BytesRef parentType : parentTypes) { + SortedDocValues docValues = DocValues.getSorted(reader, ParentFieldMapper.joinField(parentType.utf8ToString())); + builder.put(parentType.utf8ToString(), docValues); + } + return new AbstractAtomicParentChildFieldData() { + + private final ImmutableMap typeToJoinField = builder.build(); + + @Override + public Set types() { + return typeToJoinField.keySet(); + } + + @Override + public SortedDocValues getOrdinalsValues(String type) { + return typeToJoinField.get(type); + } + + @Override + public long ramBytesUsed() { + // unknown + return 0; + } + + @Override + public Collection getChildResources() { + return Collections.emptyList(); + } + + @Override + public void close() throws ElasticsearchException { + } + }; + } + boolean success = false; ParentChildAtomicFieldData data = null; ParentChildFilteredTermsEnum termsEnum = new ParentChildFilteredTermsEnum( @@ -330,12 +364,14 @@ public IndexParentChildFieldData localGlobalDirect(IndexReader indexReader) thro long ramBytesUsed = 0; final Map perType = new HashMap<>(); + final Map ordinalMapPerType = new HashMap<>(); for (String type : parentTypes) { final AtomicParentChildFieldData[] fieldData = new AtomicParentChildFieldData[indexReader.leaves().size()]; for (LeafReaderContext context : indexReader.leaves()) { fieldData[context.ord] = load(context); } final OrdinalMap ordMap = buildOrdinalMap(fieldData, type); + ordinalMapPerType.put(type, ordMap); ramBytesUsed += ordMap.ramBytesUsed(); perType.put(type, new OrdinalMapAndAtomicFieldData(ordMap, fieldData)); } @@ -353,7 +389,7 @@ public 
IndexParentChildFieldData localGlobalDirect(IndexReader indexReader) thro ); } - return new GlobalFieldData(indexReader, fielddata, ramBytesUsed); + return new GlobalFieldData(indexReader, fielddata, ramBytesUsed, ordinalMapPerType); } private static class GlobalAtomicFieldData extends AbstractAtomicParentChildFieldData { @@ -437,16 +473,18 @@ public void close() throws ElasticsearchException { } - private class GlobalFieldData implements IndexParentChildFieldData, Accountable { + public class GlobalFieldData implements IndexParentChildFieldData, Accountable { private final AtomicParentChildFieldData[] fielddata; private final IndexReader reader; private final long ramBytesUsed; + private final Map ordinalMapPerType; - GlobalFieldData(IndexReader reader, AtomicParentChildFieldData[] fielddata, long ramBytesUsed) { + GlobalFieldData(IndexReader reader, AtomicParentChildFieldData[] fielddata, long ramBytesUsed, Map ordinalMapPerType) { this.reader = reader; this.ramBytesUsed = ramBytesUsed; this.fielddata = fielddata; + this.ordinalMapPerType = ordinalMapPerType; } @Override @@ -513,6 +551,11 @@ public IndexParentChildFieldData localGlobalDirect(IndexReader indexReader) thro return loadGlobal(indexReader); } + // TODO: Need to find a better way to expose the OrdinalMap... + public OrdinalMap getOrdinalMap(String type) { + return ordinalMapPerType.get(type); + } + } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 41a841ffbc108..ef8ea9485fb8b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -19,9 +19,9 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.base.Objects; - import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermFilter; @@ -39,16 +39,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.InternalMapper; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeContext; -import org.elasticsearch.index.mapper.MergeMappingException; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.RootMapper; -import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.query.QueryParseContext; @@ -91,6 +84,7 @@ public static class Builder extends Mapper.Builder { protected String indexName; + private boolean parent; private String type; protected Settings fieldDataSettings; @@ -100,6 +94,11 @@ public Builder() { builder = this; } + public Builder parent(boolean parent) { + this.parent = parent; + return this; + } + public Builder type(String type) { this.type = type; return builder; @@ -112,10 +111,10 @@ public Builder 
fieldDataSettings(Settings settings) { @Override public ParentFieldMapper build(BuilderContext context) { - if (type == null) { + if (!parent && type == null) { throw new MapperParsingException("Parent mapping must contain the parent type"); } - return new ParentFieldMapper(name, indexName, type, fieldDataSettings, context.indexSettings()); + return new ParentFieldMapper(name, indexName, type, fieldDataSettings, context.indexSettings(), parent, context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)); } } @@ -130,6 +129,9 @@ public Mapper.Builder parse(String name, Map node, ParserContext if (fieldName.equals("type")) { builder.type(fieldNode.toString()); iterator.remove(); + } else if (fieldName.equals("parent")) { + builder.parent(XContentMapValues.nodeBooleanValue(fieldNode)); + iterator.remove(); } else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) { // ignore before 2.0, reject on and after 2.0 iterator.remove(); @@ -147,18 +149,22 @@ public Mapper.Builder parse(String name, Map node, ParserContext } } + private final boolean parent; private final String type; private final BytesRef typeAsBytes; + private final boolean docValuesJoin; - protected ParentFieldMapper(String name, String indexName, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) { + protected ParentFieldMapper(String name, String indexName, String type, @Nullable Settings fieldDataSettings, Settings indexSettings, boolean parent, boolean docValuesJoin) { super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); this.type = type; + this.parent = parent; + this.docValuesJoin = docValuesJoin; this.typeAsBytes = type == null ? 
null : new BytesRef(type); } public ParentFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, null, null, indexSettings); + this(Defaults.NAME, Defaults.NAME, null, null, indexSettings, false, Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0)); this.fieldDataType = new FieldDataType("_parent", settingsBuilder().put(Loading.KEY, Loading.LAZY_VALUE)); } @@ -178,7 +184,7 @@ public FieldDataType defaultFieldDataType() { @Override public boolean hasDocValues() { - return false; + return docValuesJoin; } @Override @@ -197,6 +203,12 @@ public boolean includeInObject() { @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { + System.out.println("adding join field for doc with id=" + context.id()); + if (parent) { + assert docValuesJoin; + fields.add(createJoinField(context.type(), context.id())); + } + if (!active()) { return; } @@ -206,6 +218,9 @@ protected void parseCreateField(ParseContext context, List fields) throws String parentId = context.parser().text(); context.sourceToParse().parent(parentId); fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); + if (docValuesJoin) { + fields.add(createJoinField(type, parentId)); + } } else { // otherwise, we are running it post processing of the xcontent String parsedParentId = context.doc().get(Defaults.NAME); @@ -217,6 +232,9 @@ protected void parseCreateField(ParseContext context, List fields) throws } // we did not add it in the parsing phase, add it now fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); + if (docValuesJoin) { + fields.add(createJoinField(type, parentId)); + } } else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) { throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]"); } @@ -225,6 +243,16 @@ protected void parseCreateField(ParseContext context, List fields) throws // we have parent mapping, yet no value was set, ignore it... 
} + private SortedDocValuesField createJoinField(String parentType, String id) { + String joinField = joinField(parentType); + System.out.println("write joinField=" + joinField); + return new SortedDocValuesField(joinField, new BytesRef(id)); + } + + public static String joinField(String parentType) { + return ParentFieldMapper.NAME + "#" + parentType; + } + @Override public Uid value(Object value) { if (value == null) { @@ -351,13 +379,18 @@ protected String contentType() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (!active()) { + if (!parent && !active()) { return builder; } boolean includeDefaults = params.paramAsBoolean("include_defaults", false); builder.startObject(CONTENT_TYPE); - builder.field("type", type); + if (type != null) { + builder.field("type", type); + } + if (parent) { + builder.field("parent", parent); + } if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { diff --git a/src/main/java/org/elasticsearch/index/query/HasChildFilterParser.java b/src/main/java/org/elasticsearch/index/query/HasChildFilterParser.java index 0f83e3408bb4c..205eb78630dd4 100644 --- a/src/main/java/org/elasticsearch/index/query/HasChildFilterParser.java +++ b/src/main/java/org/elasticsearch/index/query/HasChildFilterParser.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; @@ -41,6 +42,7 @@ import java.io.IOException; +import static org.elasticsearch.index.query.HasChildQueryParser.joinUtilHelper; import static org.elasticsearch.index.query.QueryParserUtils.ensureNotDeleteByQuery; /** @@ -172,12 +174,16 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null, parseContext.autoFilterCachePolicy()); ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper); - Query childrenQuery; - if (minChildren > 1 || maxChildren > 0) { - childrenQuery = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter,query,ScoreType.NONE,minChildren, maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter); + final Query childrenQuery; + if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) { + childrenQuery = joinUtilHelper(parentType, parentChildIndexFieldData, parentFilter, ScoreType.NONE, query); } else { - childrenQuery = new ChildrenConstantScoreQuery(parentChildIndexFieldData, query, parentType, childType, parentFilter, - shortCircuitParentDocSet, nonNestedDocsFilter); + if (minChildren > 1 || maxChildren > 0) { + childrenQuery = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter,query,ScoreType.NONE,minChildren, maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter); + } else { + childrenQuery = new ChildrenConstantScoreQuery(parentChildIndexFieldData, query, parentType, childType, parentFilter, + shortCircuitParentDocSet, nonNestedDocsFilter); + } } if (filterName != null) { parseContext.addNamedFilter(filterName, new CustomQueryWrappingFilter(childrenQuery)); diff --git a/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java 
b/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java index c7e8f2567d41e..3dad4a14f8a0c 100644 --- a/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java @@ -19,14 +19,17 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.FilteredQuery; -import org.apache.lucene.search.Query; +import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.join.JoinUtil; +import org.apache.lucene.search.*; import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.fielddata.IndexParentChildFieldData; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; @@ -38,6 +41,7 @@ import org.elasticsearch.index.search.child.ScoreType; import org.elasticsearch.index.search.nested.NonNestedDocsFilter; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SubSearchContext; import java.io.IOException; @@ -171,15 +175,19 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars // wrap the query with type query innerQuery = new FilteredQuery(innerQuery, parseContext.cacheFilter(childDocMapper.typeFilter(), null, parseContext.autoFilterCachePolicy())); - Query query; - Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null, parseContext.autoFilterCachePolicy()); - ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper); - if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) { - query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren, - maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter); + final Query query; + final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper); + final Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null, parseContext.autoFilterCachePolicy()); + if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) { + query = joinUtilHelper(parentType, parentChildIndexFieldData, parentFilter, scoreType, innerQuery); } else { - query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter, - shortCircuitParentDocSet, nonNestedDocsFilter); + if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) { + query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren, + maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter); + } else { + query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter, + shortCircuitParentDocSet, nonNestedDocsFilter); + } } if (queryName != null) { parseContext.addNamedFilter(queryName, new CustomQueryWrappingFilter(query)); @@ -187,4 +195,42 @@ public Query parse(QueryParseContext parseContext) throws 
IOException, QueryPars query.setBoost(boost); return query; } + + public static Query joinUtilHelper(String parentType, ParentChildIndexFieldData parentChildIndexFieldData, Filter toFilter, ScoreType scoreType, Query innerQuery) throws IOException { + String joinField = ParentFieldMapper.joinField(parentType); + BooleanQuery toQuery = new BooleanQuery(); + toQuery.add(toFilter, BooleanClause.Occur.FILTER); + SearchContext searchContext = SearchContext.current(); + ScoreMode scoreMode; + // TODO: grrr... we should move over to org.apache.lucene.join.ScoreMode, but we can do that when all our ScoreTypes are in the join module + switch (scoreType) { + case NONE: + scoreMode = ScoreMode.None; + break; + case MIN: + throw new UnsupportedOperationException("score type min not supported"); + case MAX: + scoreMode = ScoreMode.Max; + break; + case SUM: + scoreMode = ScoreMode.Total; + break; + case AVG: + scoreMode = ScoreMode.Avg; + break; + default: + throw new UnsupportedOperationException("score type [" + scoreType + "] not supported"); + } + IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(searchContext.searcher().getIndexReader()); + MultiDocValues.OrdinalMap ordinalMap; + if (indexParentChildFieldData instanceof ParentChildIndexFieldData.GlobalFieldData) { + // TODO: find a nicer way? + ordinalMap = ((ParentChildIndexFieldData.GlobalFieldData) indexParentChildFieldData).getOrdinalMap(parentType); + } else { + // one segment, local ordinals are global + ordinalMap = null; + } + return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, searchContext.searcher(), scoreMode, ordinalMap); + } } diff --git a/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java b/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java index a44d5a699177f..b6a679ef8f7fa 100644 --- a/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.Filter; import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; @@ -36,6 +37,7 @@ import org.elasticsearch.index.search.child.CustomQueryWrappingFilter; import org.elasticsearch.index.search.child.ParentConstantScoreQuery; import org.elasticsearch.index.search.child.ParentQuery; +import org.elasticsearch.index.search.child.ScoreType; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.internal.SubSearchContext; @@ -43,6 +45,7 @@ import java.util.HashSet; import java.util.Set; +import static org.elasticsearch.index.query.HasChildQueryParser.joinUtilHelper; import static org.elasticsearch.index.query.QueryParserUtils.ensureNotDeleteByQuery; public class HasParentQueryParser implements QueryParser { @@ -144,7 +147,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars return query; } - static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, Tuple innerHits) { + static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, Tuple innerHits) throws IOException { DocumentMapper parentDocMapper =
parseContext.mapperService().documentMapper(parentType); if (parentDocMapper == null) { throw new QueryParsingException(parseContext.index(), "[has_parent] query configured 'parent_type' [" + parentType + "] is not a valid type"); @@ -198,10 +201,15 @@ static Query createParentQuery(Query innerQuery, String parentType, boolean scor // wrap the query with type query innerQuery = new FilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null, parseContext.autoFilterCachePolicy())); Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null, parseContext.autoFilterCachePolicy()); - if (score) { - return new ParentQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter); + if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) { + ScoreType scoreMode = score ? ScoreType.MAX : ScoreType.NONE; + return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery); } else { - return new ParentConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter); + if (score) { + return new ParentQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter); + } else { + return new ParentConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter); + } } } diff --git a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index fe6292735ca70..b2dce20ff94ee 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -21,7 +21,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; @@ -48,12 +47,7 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.query.parser.QueryParserCache; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMappers; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MapperBuilders; -import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.search.child.CustomQueryWrappingFilter; @@ -93,6 +87,8 @@ public static void removeTypes() { private final Index index; + private final Version indexVersionCreated; + private boolean propagateNoCache = false; private boolean requireCustomQueryWrappingFilter = false; @@ -121,6 +117,7 @@ public QueryParseContext(Index index, IndexQueryParserService indexQueryParser) public QueryParseContext(Index index, IndexQueryParserService indexQueryParser, boolean disableFilterCaching) { this.index = index; + this.indexVersionCreated = Version.indexCreated(indexQueryParser.indexSettings()); this.indexQueryParser = indexQueryParser; this.propagateNoCache = disableFilterCaching; this.disableFilterCaching = disableFilterCaching; @@ -481,4 +478,8 @@ public boolean requireCustomQueryWrappingFilter() { public NestedScope nestedScope() { return 
nestedScope; } + + public Version indexVersionCreated() { + return indexVersionCreated; + } } diff --git a/src/main/java/org/elasticsearch/index/query/TopChildrenQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/TopChildrenQueryBuilder.java index a8174230db932..011f681722769 100644 --- a/src/main/java/org/elasticsearch/index/query/TopChildrenQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/TopChildrenQueryBuilder.java @@ -25,6 +25,7 @@ /** * */ +@Deprecated public class TopChildrenQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder { private final QueryBuilder queryBuilder; diff --git a/src/main/java/org/elasticsearch/index/query/TopChildrenQueryParser.java b/src/main/java/org/elasticsearch/index/query/TopChildrenQueryParser.java index a0f5834b44eba..92ed6d1937e6f 100644 --- a/src/main/java/org/elasticsearch/index/query/TopChildrenQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/TopChildrenQueryParser.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; @@ -40,6 +41,7 @@ /** * */ +@Deprecated public class TopChildrenQueryParser implements QueryParser { public static final String NAME = "top_children"; @@ -58,6 +60,11 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars ensureNotDeleteByQuery(NAME, parseContext); XContentParser parser = parseContext.parser(); + // TODO: maybe remove entirely? + if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) { + throw new UnsupportedOperationException("the [top_children] query has been dropped for indices created on or after 2.0"); + } + boolean queryFound = false; float boost = 1.0f; String childType = null; diff --git a/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java b/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java index e75878ca94ef6..09de2830c2141 100644 --- a/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java +++ b/src/main/java/org/elasticsearch/index/search/child/TopChildrenQuery.java @@ -54,6 +54,7 @@ * This query is most of the times faster than the {@link ChildrenQuery}. Usually enough parent documents can be returned * in the first child document query round. 
*/ +@Deprecated public class TopChildrenQuery extends Query { private static final ParentDocComparator PARENT_DOC_COMP = new ParentDocComparator(); diff --git a/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchBwcTests.java b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchBwcTests.java new file mode 100644 index 0000000000000..63302018dfd04 --- /dev/null +++ b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchBwcTests.java @@ -0,0 +1,2720 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ +package org.elasticsearch.search.child; + +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchIllegalArgumentException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.count.CountResponse; +import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse; +import org.elasticsearch.action.explain.ExplainResponse; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.settings.ImmutableSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.cache.filter.AutoFilterCachingPolicy; +import org.elasticsearch.index.cache.filter.FilterCacheModule; +import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.query.FilterBuilder; +import org.elasticsearch.index.query.FilterBuilders; +import org.elasticsearch.index.query.HasChildFilterBuilder; +import org.elasticsearch.index.query.HasChildQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.search.child.ScoreType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.global.Global; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.sort.SortBuilders; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ElasticsearchIntegrationTest; +import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; +import org.elasticsearch.test.ElasticsearchIntegrationTest.Scope; +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static com.google.common.collect.Maps.newHashMap; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.common.settings.ImmutableSettings.builder; +import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static 
org.elasticsearch.index.query.FilterBuilders.boolFilter; +import static org.elasticsearch.index.query.FilterBuilders.hasParentFilter; +import static org.elasticsearch.index.query.FilterBuilders.matchAllFilter; +import static org.elasticsearch.index.query.FilterBuilders.notFilter; +import static org.elasticsearch.index.query.FilterBuilders.queryFilter; +import static org.elasticsearch.index.query.FilterBuilders.termFilter; +import static org.elasticsearch.index.query.FilterBuilders.termsFilter; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; +import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery; +import static org.elasticsearch.index.query.QueryBuilders.idsQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.topChildrenQuery; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.factorFunction; +import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; + +/** + * + */ +@ClusterScope(scope = Scope.SUITE) +public class SimpleChildQuerySearchBwcTests extends ElasticsearchIntegrationTest { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return ImmutableSettings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) + // aggressive filter caching so that we can assert on the filter cache size + .put(FilterCacheModule.FilterCacheSettings.FILTER_CACHE_TYPE, WeightedFilterCache.class) + .put(AutoFilterCachingPolicy.AGGRESSIVE_CACHING_SETTINGS) + .build(); + } + + @Override + public Settings indexSettings() { + return ImmutableSettings.builder() + .put(super.indexSettings()).put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_6_0).build(); + } + + @Test + public void multiLevelChild() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent") + .addMapping("grandchild", "_parent", "type=child")); + ensureGreen(); + + 
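// Illustrative sketch (hypothetical, not part of this patch): with the doc-values join introduced
// here, parent/child documents are linked through a SortedDocValuesField named
// ParentFieldMapper.joinField(parentType) ("_parent#parent" in this test) holding the parent id
// (see createJoinField above), and for indices created on or after 2.0 the has_child/has_parent
// parsers delegate to JoinUtil through HasChildQueryParser.joinUtilHelper. Because indexSettings()
// above pins these indices to an older created-version, the BWC tests below keep exercising the
// legacy ChildrenQuery/ParentQuery path. A minimal stand-alone use of the join, assuming
// hypothetical parentTypeFilter, searcher and ordinalMap variables, would look roughly like:
//
//   String joinField = ParentFieldMapper.joinField("parent");              // "_parent#parent"
//   Query fromQuery = new TermQuery(new Term("c_field", "c_value1"));      // matches child docs
//   BooleanQuery toQuery = new BooleanQuery();
//   toQuery.add(parentTypeFilter, BooleanClause.Occur.FILTER);             // restrict the "to" side to parent docs
//   Query joined = JoinUtil.createJoinQuery(joinField, fromQuery, toQuery,
//           searcher, ScoreMode.None, ordinalMap);                         // ordinalMap: global ordinals, null when there is a single segment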
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "c_value1").setParent("p1").get(); + client().prepareIndex("test", "grandchild", "gc1").setSource("gc_field", "gc_value1") + .setParent("c1").setRouting("p1").get(); + refresh(); + + SearchResponse searchResponse = client() + .prepareSearch("test") + .setQuery( + filteredQuery( + matchAllQuery(), + hasChildFilter( + "child", + filteredQuery(termQuery("c_field", "c_value1"), + hasChildFilter("grandchild", termQuery("gc_field", "gc_value1")))))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), hasParentFilter("parent", termFilter("p_field", "p_value1")))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1")); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), hasParentFilter("child", termFilter("c_field", "c_value1")))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("gc1")); + + searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1")); + + searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("gc1")); + } + + @Test + // see #6722 + public void test6722() throws ElasticsearchException, IOException { + assertAcked(prepareCreate("test") + .addMapping("foo") + .addMapping("test", "_parent", "type=foo")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "foo", "1").setSource("foo", 1).get(); + client().prepareIndex("test", "test", "2").setSource("foo", 1).setParent("1").get(); + refresh(); + String query = copyToStringFromClasspath("/org/elasticsearch/search/child/bool-query-with-empty-clauses.json"); + SearchResponse searchResponse = client().prepareSearch("test").setSource(query).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); + } + + @Test + // see #2744 + public void test2744() throws ElasticsearchException, IOException { + assertAcked(prepareCreate("test") + .addMapping("foo") + .addMapping("test", "_parent", "type=foo")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "foo", "1").setSource("foo", 1).get(); + client().prepareIndex("test", "test").setSource("foo", 1).setParent("1").get(); + refresh(); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), 
equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1")); + + } + + @Test + public void simpleChildQuery() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + refresh(); + + // TEST FETCHING _parent from child + SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").ids("c1")).addFields("_parent").execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1")); + assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1")); + + // TEST matching on parent + searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent", "p1")).addFields("_parent").get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); + assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2"))); + assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1")); + + searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent:p1")).addFields("_parent").get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); + assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2"))); + assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1")); + + // TOP CHILDREN QUERY + searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))).execute() + .actionGet(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + + searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "blue"))) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); + + searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "red"))).execute() + .actionGet(); + assertHitCount(searchResponse, 2l); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); + + // HAS CHILD + searchResponse = 
client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + + searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute() + .actionGet(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); + + searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "red")).get(); + assertHitCount(searchResponse, 2l); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); + + // HAS PARENT + searchResponse = client().prepareSearch("test") + .setQuery(randomHasParent("parent", "p_field", "p_value2")).get(); + assertNoFailures(searchResponse); + assertHitCount(searchResponse, 2l); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c3")); + assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c4")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomHasParent("parent", "p_field", "p_value1")).get(); + assertHitCount(searchResponse, 2l); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1")); + assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c2")); + } + + @Test + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/9270") + public void testClearIdCacheBug() throws Exception { + // enforce lazy loading to make sure that p/c stats are not counted as part of field data + assertAcked(prepareCreate("test") + .setSettings(ImmutableSettings.builder().put(indexSettings()) + .put("index.refresh_interval", -1)) // Disable automatic refresh, so that the _parent doesn't get warmed + .addMapping("parent", XContentFactory.jsonBuilder().startObject().startObject("parent") + .startObject("properties") + .startObject("p_field") + .field("type", "string") + .startObject("fielddata") + .field(FieldDataType.FORMAT_KEY, Loading.LAZY) + .endObject() + .endObject() + .endObject().endObject().endObject())); + + ensureGreen(); + + client().prepareIndex("test", "parent", "p0").setSource("p_field", "p_value0").get(); + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + + refresh(); + // No _parent field yet, there shouldn't be anything in the parent id cache + IndicesStatsResponse indicesStatsResponse = client().admin().indices() + .prepareStats("test").setIdCache(true).get(); + assertThat(indicesStatsResponse.getTotal().getIdCache().getMemorySizeInBytes(), equalTo(0l)); + + // Now add mapping + children + client().admin().indices().preparePutMapping("test").setType("child") + .setSource(XContentFactory.jsonBuilder().startObject().startObject("child") + .startObject("_parent") + .field("type", "parent") + .endObject() + .startObject("properties") + .startObject("c_field") + .field("type", "string") + .startObject("fielddata") + .field(FieldDataType.FORMAT_KEY, Loading.LAZY) + .endObject() + .endObject() + .endObject().endObject().endObject()) + .get(); + + // index simple data + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", 
"p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + + refresh(); + + indicesStatsResponse = client().admin().indices() + .prepareStats("test").setFieldData(true).get(); + // automatic warm-up has populated the cache since it found a parent field mapper + assertThat(indicesStatsResponse.getTotal().getIdCache().getMemorySizeInBytes(), greaterThan(0l)); + // Even though p/c is field data based the stats stay zero, because _parent field data field is kept + // track of under id cache stats memory wise for bwc + assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l)); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + + indicesStatsResponse = client().admin().indices() + .prepareStats("test").setFieldData(true).get(); + assertThat(indicesStatsResponse.getTotal().getIdCache().getMemorySizeInBytes(), greaterThan(0l)); + assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l)); + + ClearIndicesCacheResponse clearCacheResponse = client().admin().indices().prepareClearCache("test").setIdCache(true).get(); + assertNoFailures(clearCacheResponse); + assertAllSuccessful(clearCacheResponse); + indicesStatsResponse = client().admin().indices() + .prepareStats("test").setFieldData(true).get(); + assertThat(indicesStatsResponse.getTotal().getIdCache().getMemorySizeInBytes(), equalTo(0l)); + assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l)); + } + + @Test + // See: https://github.com/elasticsearch/elasticsearch/issues/3290 + public void testCachingBug_withFqueryFilter() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + List builders = new ArrayList<>(); + // index simple data + for (int i = 0; i < 10; i++) { + builders.add(client().prepareIndex("test", "parent", Integer.toString(i)).setSource("p_field", i)); + } + indexRandom(randomBoolean(), builders); + builders.clear(); + for (int j = 0; j < 2; j++) { + for (int i = 0; i < 10; i++) { + builders.add(client().prepareIndex("test", "child", Integer.toString(i)).setSource("c_field", i).setParent("" + 0)); + } + for (int i = 0; i < 10; i++) { + builders.add(client().prepareIndex("test", "child", Integer.toString(i + 10)).setSource("c_field", i + 10).setParent(Integer.toString(i))); + } + + if (randomBoolean()) { + break; // randomly break out and dont' have deletes / updates + } + } + indexRandom(true, builders); + + for (int i = 1; i <= 10; i++) { + logger.info("Round {}", i); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(queryFilter(topChildrenQuery("child", matchAllQuery())).cache(true))).execute() + .actionGet(); + assertNoFailures(searchResponse); + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(queryFilter(hasChildQuery("child", matchAllQuery()).scoreType("max")).cache(true))) + .get(); + assertNoFailures(searchResponse); + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(queryFilter(hasParentQuery("parent", 
matchAllQuery()).scoreType("score")).cache(true))) + .get(); + assertNoFailures(searchResponse); + } + } + + @Test + public void testHasParentFilter() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + Map> parentToChildren = newHashMap(); + // Childless parent + client().prepareIndex("test", "parent", "p0").setSource("p_field", "p0").get(); + parentToChildren.put("p0", new HashSet()); + + String previousParentId = null; + int numChildDocs = 32; + int numChildDocsPerParent = 0; + List builders = new ArrayList<>(); + for (int i = 1; i <= numChildDocs; i++) { + + if (previousParentId == null || i % numChildDocsPerParent == 0) { + previousParentId = "p" + i; + builders.add(client().prepareIndex("test", "parent", previousParentId).setSource("p_field", previousParentId)); + numChildDocsPerParent++; + } + + String childId = "c" + i; + builders.add(client().prepareIndex("test", "child", childId).setSource("c_field", childId).setParent(previousParentId)); + + if (!parentToChildren.containsKey(previousParentId)) { + parentToChildren.put(previousParentId, new HashSet()); + } + assertThat(parentToChildren.get(previousParentId).add(childId), is(true)); + } + indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()])); + + assertThat(parentToChildren.isEmpty(), equalTo(false)); + for (Map.Entry> parentToChildrenEntry : parentToChildren.entrySet()) { + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", parentToChildrenEntry.getKey())))) + .setSize(numChildDocsPerParent).get(); + + assertNoFailures(searchResponse); + Set childIds = parentToChildrenEntry.getValue(); + assertThat(searchResponse.getHits().totalHits(), equalTo((long) childIds.size())); + for (int i = 0; i < searchResponse.getHits().totalHits(); i++) { + assertThat(childIds.remove(searchResponse.getHits().getAt(i).id()), is(true)); + assertThat(searchResponse.getHits().getAt(i).score(), is(1.0f)); + } + assertThat(childIds.size(), is(0)); + } + } + + @Test + public void simpleChildQueryWithFlush() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data with flushes, so we have many segments + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().admin().indices().prepareFlush().get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().admin().indices().prepareFlush().get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().admin().indices().prepareFlush().get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().admin().indices().prepareFlush().get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().admin().indices().prepareFlush().get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + client().admin().indices().prepareFlush().get(); + refresh(); + + // TOP CHILDREN QUERY + + SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))) + .get(); + assertNoFailures(searchResponse); + assertNoFailures(searchResponse); + 
assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + + searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "blue"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); + + searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "red"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); + + // HAS CHILD QUERY + + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); + + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); + + // HAS CHILD FILTER + + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + + searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); + + searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red")))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); + } + + @Test + public void testScopedFacet() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", 
"parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + + refresh(); + + SearchResponse searchResponse = client() + .prepareSearch("test") + .setQuery(topChildrenQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")))) + .addAggregation(AggregationBuilders.global("global").subAggregation( + AggregationBuilders.filter("filter").filter(boolFilter().should(termFilter("c_field", "red")).should(termFilter("c_field", "yellow"))).subAggregation( + AggregationBuilders.terms("facet1").field("c_field")))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); + + Global global = searchResponse.getAggregations().get("global"); + Filter filter = global.getAggregations().get("filter"); + Terms termsFacet = filter.getAggregations().get("facet1"); + assertThat(termsFacet.getBuckets().size(), equalTo(2)); + assertThat(termsFacet.getBuckets().get(0).getKeyAsString(), equalTo("red")); + assertThat(termsFacet.getBuckets().get(0).getDocCount(), equalTo(2L)); + assertThat(termsFacet.getBuckets().get(1).getKeyAsString(), equalTo("yellow")); + assertThat(termsFacet.getBuckets().get(1).getDocCount(), equalTo(1L)); + } + + @Test + public void testDeletedParent() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + + refresh(); + + // TOP CHILDREN QUERY + + SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1\"")); + + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1\"")); + + // update p1 and see what that we get updated values... 
+ + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1_updated").get(); + client().admin().indices().prepareRefresh().get(); + + searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1_updated\"")); + + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1_updated\"")); + } + + @Test + public void testDfsSearchType() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + + refresh(); + + SearchResponse searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryStringQuery("c_field:*"))))).get(); + assertNoFailures(searchResponse); + + searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryStringQuery("p_field:*"))))).execute() + .actionGet(); + assertNoFailures(searchResponse); + + searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + .setQuery(boolQuery().mustNot(topChildrenQuery("child", boolQuery().should(queryStringQuery("c_field:*"))))).execute() + .actionGet(); + assertNoFailures(searchResponse); + } + + @Test + public void testFixAOBEIfTopChildrenIsWrappedInMusNotClause() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + + refresh(); + + SearchResponse searchResponse = 
client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) + .setQuery(boolQuery().mustNot(topChildrenQuery("child", boolQuery().should(queryStringQuery("c_field:*"))))).execute() + .actionGet(); + assertNoFailures(searchResponse); + } + + @Test + public void testTopChildrenReSearchBug() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + int numberOfParents = 4; + int numberOfChildrenPerParent = 123; + for (int i = 1; i <= numberOfParents; i++) { + String parentId = String.format(Locale.ROOT, "p%d", i); + client().prepareIndex("test", "parent", parentId).setSource("p_field", String.format(Locale.ROOT, "p_value%d", i)).execute() + .actionGet(); + for (int j = 1; j <= numberOfChildrenPerParent; j++) { + client().prepareIndex("test", "child", String.format(Locale.ROOT, "%s_c%d", parentId, j)) + .setSource("c_field1", parentId, "c_field2", i % 2 == 0 ? "even" : "not_even").setParent(parentId).execute() + .actionGet(); + } + } + + refresh(); + + SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field1", "p3"))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p3")); + + searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field2", "even"))).execute() + .actionGet(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p4"))); + assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p4"))); + } + + @Test + public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrChildDocs() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + client().prepareIndex("test", "parent", "1").setSource("p_field", 1).get(); + client().prepareIndex("test", "child", "1").setParent("1").setSource("c_field", 1).get(); + client().admin().indices().prepareFlush("test").get(); + + client().prepareIndex("test", "type1", "1").setSource("p_field", 1).get(); + client().admin().indices().prepareFlush("test").get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), hasChildFilter("child", matchAllQuery()))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), hasParentFilter("parent", matchAllQuery()))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + } + + @Test + public void testCountApiUsage() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + String parentId = "p1"; + client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); + refresh(); + + CountResponse countResponse = client().prepareCount("test").setQuery(topChildrenQuery("child", termQuery("c_field", "1"))) + .get(); + assertHitCount(countResponse, 1l); + + countResponse = 
client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) + .get(); + assertHitCount(countResponse, 1l); + + countResponse = client().prepareCount("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score")) + .get(); + assertHitCount(countResponse, 1l); + + countResponse = client().prepareCount("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "1")))) + .get(); + assertHitCount(countResponse, 1l); + + countResponse = client().prepareCount("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "1")))) + .get(); + assertHitCount(countResponse, 1l); + } + + @Test + public void testExplainUsage() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + String parentId = "p1"; + client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); + refresh(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setExplain(true) + .setQuery(topChildrenQuery("child", termQuery("c_field", "1"))) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet...")); + + searchResponse = client().prepareSearch("test") + .setExplain(true) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet...")); + + searchResponse = client().prepareSearch("test") + .setExplain(true) + .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score")) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet...")); + + ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) + .get(); + assertThat(explainResponse.isExists(), equalTo(true)); + assertThat(explainResponse.getExplanation().getDescription(), equalTo("not implemented yet...")); + } + + List createDocBuilders() { + List indexBuilders = new ArrayList<>(); + // Parent 1 and its children + indexBuilders.add(client().prepareIndex().setType("parent").setId("1").setIndex("test").setSource("p_field", "p_value1")); + indexBuilders.add(client().prepareIndex().setType("child").setId("1").setIndex("test") + .setSource("c_field1", 1, "c_field2", 0).setParent("1")); + indexBuilders.add(client().prepareIndex().setType("child").setId("2").setIndex("test") + .setSource("c_field1", 1, "c_field2", 0).setParent("1")); + indexBuilders.add(client().prepareIndex().setType("child").setId("3").setIndex("test") + .setSource("c_field1", 2, "c_field2", 0).setParent("1")); + indexBuilders.add(client().prepareIndex().setType("child").setId("4").setIndex("test") + .setSource("c_field1", 2, "c_field2", 0).setParent("1")); + indexBuilders.add(client().prepareIndex().setType("child").setId("5").setIndex("test") + .setSource("c_field1", 1, "c_field2", 1).setParent("1")); + indexBuilders.add(client().prepareIndex().setType("child").setId("6").setIndex("test") + .setSource("c_field1", 1, "c_field2", 2).setParent("1")); + + // Parent 2 and its 
children + indexBuilders.add(client().prepareIndex().setType("parent").setId("2").setIndex("test").setSource("p_field", "p_value2")); + indexBuilders.add(client().prepareIndex().setType("child").setId("7").setIndex("test") + .setSource("c_field1", 3, "c_field2", 0).setParent("2")); + indexBuilders.add(client().prepareIndex().setType("child").setId("8").setIndex("test") + .setSource("c_field1", 1, "c_field2", 1).setParent("2")); + indexBuilders.add(client().prepareIndex().setType("child").setId("9").setIndex("test") + .setSource("c_field1", 1, "c_field2", 1).setParent("p")); // why + // "p"???? + indexBuilders.add(client().prepareIndex().setType("child").setId("10").setIndex("test") + .setSource("c_field1", 1, "c_field2", 1).setParent("2")); + indexBuilders.add(client().prepareIndex().setType("child").setId("11").setIndex("test") + .setSource("c_field1", 1, "c_field2", 1).setParent("2")); + indexBuilders.add(client().prepareIndex().setType("child").setId("12").setIndex("test") + .setSource("c_field1", 1, "c_field2", 2).setParent("2")); + + // Parent 3 and its children + + indexBuilders.add(client().prepareIndex().setType("parent").setId("3").setIndex("test") + .setSource("p_field1", "p_value3", "p_field2", 5)); + indexBuilders.add(client().prepareIndex().setType("child").setId("13").setIndex("test") + .setSource("c_field1", 4, "c_field2", 0, "c_field3", 0).setParent("3")); + indexBuilders.add(client().prepareIndex().setType("child").setId("14").setIndex("test") + .setSource("c_field1", 1, "c_field2", 1, "c_field3", 1).setParent("3")); + indexBuilders.add(client().prepareIndex().setType("child").setId("15").setIndex("test") + .setSource("c_field1", 1, "c_field2", 2, "c_field3", 2).setParent("3")); // why + // "p"???? + indexBuilders.add(client().prepareIndex().setType("child").setId("16").setIndex("test") + .setSource("c_field1", 1, "c_field2", 2, "c_field3", 3).setParent("3")); + indexBuilders.add(client().prepareIndex().setType("child").setId("17").setIndex("test") + .setSource("c_field1", 1, "c_field2", 2, "c_field3", 4).setParent("3")); + indexBuilders.add(client().prepareIndex().setType("child").setId("18").setIndex("test") + .setSource("c_field1", 1, "c_field2", 2, "c_field3", 5).setParent("3")); + indexBuilders.add(client().prepareIndex().setType("child1").setId("1").setIndex("test") + .setSource("c_field1", 1, "c_field2", 2, "c_field3", 6).setParent("3")); + + return indexBuilders; + } + + @Test + public void testScoreForParentChildQueries_withFunctionScore() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent") + .addMapping("child1", "_parent", "type=parent")); + ensureGreen(); + + indexRandom(true, createDocBuilders().toArray(new IndexRequestBuilder[0])); + SearchResponse response = client() + .prepareSearch("test") + .setQuery( + QueryBuilders.hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction("doc['c_field1'].value")) + .boostMode(CombineFunction.REPLACE.getName())).scoreType("sum")).get(); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("1")); + assertThat(response.getHits().hits()[0].score(), equalTo(6f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(4f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(3f)); + + response = client() + 
.prepareSearch("test") + .setQuery( + QueryBuilders.hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction("doc['c_field1'].value")) + .boostMode(CombineFunction.REPLACE.getName())).scoreType("max")).get(); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(4f)); + assertThat(response.getHits().hits()[1].id(), equalTo("2")); + assertThat(response.getHits().hits()[1].score(), equalTo(3f)); + assertThat(response.getHits().hits()[2].id(), equalTo("1")); + assertThat(response.getHits().hits()[2].score(), equalTo(2f)); + + response = client() + .prepareSearch("test") + .setQuery( + QueryBuilders.hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction("doc['c_field1'].value")) + .boostMode(CombineFunction.REPLACE.getName())).scoreType("avg")).get(); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(4f)); + assertThat(response.getHits().hits()[1].id(), equalTo("2")); + assertThat(response.getHits().hits()[1].score(), equalTo(3f)); + assertThat(response.getHits().hits()[2].id(), equalTo("1")); + assertThat(response.getHits().hits()[2].score(), equalTo(1.5f)); + + response = client() + .prepareSearch("test") + .setQuery( + QueryBuilders.hasParentQuery( + "parent", + QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"), scriptFunction("doc['p_field2'].value")) + .boostMode(CombineFunction.REPLACE.getName())).scoreType("score")) + .addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()).get(); + + assertThat(response.getHits().totalHits(), equalTo(7l)); + assertThat(response.getHits().hits()[0].id(), equalTo("13")); + assertThat(response.getHits().hits()[0].score(), equalTo(5f)); + assertThat(response.getHits().hits()[1].id(), equalTo("14")); + assertThat(response.getHits().hits()[1].score(), equalTo(5f)); + assertThat(response.getHits().hits()[2].id(), equalTo("15")); + assertThat(response.getHits().hits()[2].score(), equalTo(5f)); + assertThat(response.getHits().hits()[3].id(), equalTo("16")); + assertThat(response.getHits().hits()[3].score(), equalTo(5f)); + assertThat(response.getHits().hits()[4].id(), equalTo("17")); + assertThat(response.getHits().hits()[4].score(), equalTo(5f)); + assertThat(response.getHits().hits()[5].id(), equalTo("18")); + assertThat(response.getHits().hits()[5].score(), equalTo(5f)); + assertThat(response.getHits().hits()[6].id(), equalTo("1")); + assertThat(response.getHits().hits()[6].score(), equalTo(5f)); + } + + @Test + // https://github.com/elasticsearch/elasticsearch/issues/2536 + public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + SearchResponse response = client().prepareSearch("test") + .setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value"))).get(); + assertNoFailures(response); + assertThat(response.getHits().totalHits(), equalTo(0l)); + + client().prepareIndex("test", "child1").setSource(jsonBuilder().startObject().field("text", "value").endObject()).setRefresh(true) + .get(); + + response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", 
"value"))).get(); + assertNoFailures(response); + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreType("max")) + .get(); + assertNoFailures(response); + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value"))).get(); + assertNoFailures(response); + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value")).scoreType("score")) + .get(); + assertNoFailures(response); + assertThat(response.getHits().totalHits(), equalTo(0l)); + } + + @Test + public void testHasChildAndHasParentFilter_withFilter() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + client().prepareIndex("test", "parent", "1").setSource("p_field", 1).get(); + client().prepareIndex("test", "child", "2").setParent("1").setSource("c_field", 1).get(); + client().admin().indices().prepareFlush("test").get(); + + client().prepareIndex("test", "type1", "3").setSource("p_field", "p_value1").get(); + client().admin().indices().prepareFlush("test").get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), hasChildFilter("child", termFilter("c_field", 1)))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().hits()[0].id(), equalTo("1")); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), hasParentFilter("parent", termFilter("p_field", 1)))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().hits()[0].id(), equalTo("2")); + } + + @Test + public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // query filter in case for p/c shouldn't execute per segment, but rather + client().prepareIndex("test", "parent", "1").setSource("p_field", 1).get(); + client().admin().indices().prepareFlush("test").setForce(true).get(); + client().prepareIndex("test", "child", "2").setParent("1").setSource("c_field", 1).get(); + refresh(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), queryFilter(hasChildQuery("child", matchQuery("c_field", 1))))).get(); + assertSearchHit(searchResponse, 1, hasId("1")); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), queryFilter(topChildrenQuery("child", matchQuery("c_field", 1))))).get(); + assertSearchHit(searchResponse, 1, hasId("1")); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), queryFilter(hasParentQuery("parent", matchQuery("p_field", 1))))).get(); + assertSearchHit(searchResponse, 1, hasId("2")); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), queryFilter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1)))))).get(); + assertSearchHit(searchResponse, 1, hasId("1")); + + searchResponse = 
client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), queryFilter(boolQuery().must(topChildrenQuery("child", matchQuery("c_field", 1)))))).get(); + assertSearchHit(searchResponse, 1, hasId("1")); + + searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), queryFilter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1)))))).get(); + assertSearchHit(searchResponse, 1, hasId("2")); + } + + @Test + public void testHasChildAndHasParentWrappedInAQueryFilterShouldNeverGetCached() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(ImmutableSettings.builder().put("index.cache.filter.type", "weighted")) + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + client().prepareIndex("test", "parent", "1").setSource("p_field", 1).get(); + client().prepareIndex("test", "child", "2").setParent("1").setSource("c_field", 1).get(); + refresh(); + + for (int i = 0; i < 10; i++) { + SearchResponse searchResponse = client().prepareSearch("test") + .setExplain(true) + .setQuery(constantScoreQuery(boolFilter() + .must(queryFilter(hasChildQuery("child", matchQuery("c_field", 1)))) + .cache(true) + )).get(); + assertSearchHit(searchResponse, 1, hasId("1")); + // Can't start with ConstantScore(cache(BooleanFilter( + assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), startsWith("ConstantScore(BooleanFilter(")); + + searchResponse = client().prepareSearch("test") + .setExplain(true) + .setQuery(constantScoreQuery(boolFilter() + .must(queryFilter(boolQuery().must(matchAllQuery()).must(hasChildQuery("child", matchQuery("c_field", 1))))) + .cache(true) + )).get(); + assertSearchHit(searchResponse, 1, hasId("1")); + // Can't start with ConstantScore(cache(BooleanFilter( + assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), startsWith("ConstantScore(BooleanFilter(")); + } + } + + @Test + public void testSimpleQueryRewrite() throws Exception { + assertAcked(prepareCreate("test") + //top_children query needs at least 2 shards for the totalHits to be accurate + .setSettings(settingsBuilder() + .put(indexSettings()) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(2, DEFAULT_MAX_NUM_SHARDS))) + .addMapping("parent", "p_field", "type=string") + .addMapping("child", "_parent", "type=parent", "c_field", "type=string")); + ensureGreen(); + + // index simple data + int childId = 0; + for (int i = 0; i < 10; i++) { + String parentId = String.format(Locale.ROOT, "p%03d", i); + client().prepareIndex("test", "parent", parentId).setSource("p_field", parentId).get(); + int j = childId; + for (; j < childId + 50; j++) { + String childUid = String.format(Locale.ROOT, "c%03d", j); + client().prepareIndex("test", "child", childUid).setSource("c_field", childUid).setParent(parentId).get(); + } + childId = j; + } + refresh(); + + SearchType[] searchTypes = new SearchType[]{SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH}; + for (SearchType searchType : searchTypes) { + SearchResponse searchResponse = client().prepareSearch("test").setSearchType(searchType) + .setQuery(hasChildQuery("child", prefixQuery("c_field", "c")).scoreType("max")).addSort("p_field", SortOrder.ASC) + .setSize(5).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(10L)); + assertThat(searchResponse.getHits().hits()[0].id(), equalTo("p000")); + assertThat(searchResponse.getHits().hits()[1].id(), equalTo("p001")); + 
assertThat(searchResponse.getHits().hits()[2].id(), equalTo("p002")); + assertThat(searchResponse.getHits().hits()[3].id(), equalTo("p003")); + assertThat(searchResponse.getHits().hits()[4].id(), equalTo("p004")); + + searchResponse = client().prepareSearch("test").setSearchType(searchType) + .setQuery(hasParentQuery("parent", prefixQuery("p_field", "p")).scoreType("score")).addSort("c_field", SortOrder.ASC) + .setSize(5).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(500L)); + assertThat(searchResponse.getHits().hits()[0].id(), equalTo("c000")); + assertThat(searchResponse.getHits().hits()[1].id(), equalTo("c001")); + assertThat(searchResponse.getHits().hits()[2].id(), equalTo("c002")); + assertThat(searchResponse.getHits().hits()[3].id(), equalTo("c003")); + assertThat(searchResponse.getHits().hits()[4].id(), equalTo("c004")); + + searchResponse = client().prepareSearch("test").setSearchType(searchType) + .setQuery(topChildrenQuery("child", prefixQuery("c_field", "c")).factor(10)).addSort("p_field", SortOrder.ASC).setSize(5) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(10L)); + assertThat(searchResponse.getHits().hits()[0].id(), equalTo("p000")); + assertThat(searchResponse.getHits().hits()[1].id(), equalTo("p001")); + assertThat(searchResponse.getHits().hits()[2].id(), equalTo("p002")); + assertThat(searchResponse.getHits().hits()[3].id(), equalTo("p003")); + assertThat(searchResponse.getHits().hits()[4].id(), equalTo("p004")); + } + } + + @Test + // See also issue: + // https://github.com/elasticsearch/elasticsearch/issues/3144 + public void testReIndexingParentAndChildDocuments() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "x").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "x").setParent("p2").get(); + + refresh(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType("sum")).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1\"")); + + searchResponse = client() + .prepareSearch("test") + .setQuery( + boolQuery().must(matchQuery("c_field", "x")).must( + hasParentQuery("parent", termQuery("p_field", "p_value2")).scoreType("score"))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c3")); + assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c4")); + + // re-index + for (int i = 0; i < 10; i++) { + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "d" + 
i).setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "x").setParent("p2").get(); + client().admin().indices().prepareRefresh("test").get(); + } + + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreType("sum")) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); + assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1\"")); + + searchResponse = client() + .prepareSearch("test") + .setQuery( + boolQuery().must(matchQuery("c_field", "x")).must( + hasParentQuery("parent", termQuery("p_field", "p_value2")).scoreType("score"))).get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getAt(0).id(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); + assertThat(searchResponse.getHits().getAt(1).id(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); + } + + @Test + // See also issue: + // https://github.com/elasticsearch/elasticsearch/issues/3203 + public void testHasChildQueryWithMinimumScore() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "x").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "x").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "x").setParent("p2").get(); + client().prepareIndex("test", "child", "c5").setSource("c_field", "x").setParent("p2").get(); + refresh(); + + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery()).scoreType("sum")) + .setMinScore(3) // Score needs to be 3 or above! 
+ .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); + assertThat(searchResponse.getHits().getAt(0).score(), equalTo(3.0f)); + } + + @Test + public void testParentFieldFilter() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(settingsBuilder().put(indexSettings()) + .put("index.refresh_interval", -1)) + .addMapping("parent") + .addMapping("child", "_parent", "type=parent") + .addMapping("child2", "_parent", "type=parent")); + ensureGreen(); + + // test term filter + SearchResponse response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termFilter("_parent", "p1"))) + .get(); + assertHitCount(response, 0l); + + client().prepareIndex("test", "some_type", "1").setSource("field", "value").get(); + client().prepareIndex("test", "parent", "p1").setSource("p_field", "value").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "value").setParent("p1").get(); + + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termFilter("_parent", "p1"))).execute() + .actionGet(); + assertHitCount(response, 0l); + refresh(); + + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termFilter("_parent", "p1"))).execute() + .actionGet(); + assertHitCount(response, 1l); + + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termFilter("_parent", "parent#p1"))).execute() + .actionGet(); + assertHitCount(response, 1l); + + client().prepareIndex("test", "parent2", "p1").setSource("p_field", "value").setRefresh(true).get(); + + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termFilter("_parent", "p1"))).execute() + .actionGet(); + assertHitCount(response, 1l); + + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termFilter("_parent", "parent#p1"))).execute() + .actionGet(); + assertHitCount(response, 1l); + + // test terms filter + client().prepareIndex("test", "child2", "c1").setSource("c_field", "value").setParent("p1").get(); + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termsFilter("_parent", "p1"))).execute() + .actionGet(); + assertHitCount(response, 1l); + + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termsFilter("_parent", "parent#p1"))).execute() + .actionGet(); + assertHitCount(response, 1l); + + refresh(); + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termsFilter("_parent", "p1"))).execute() + .actionGet(); + assertHitCount(response, 2l); + + refresh(); + response = client().prepareSearch("test").setQuery(filteredQuery(matchAllQuery(), termsFilter("_parent", "p1", "p1"))).execute() + .actionGet(); + assertHitCount(response, 2l); + + response = client().prepareSearch("test") + .setQuery(filteredQuery(matchAllQuery(), termsFilter("_parent", "parent#p1", "parent2#p1"))).get(); + assertHitCount(response, 2l); + } + + @Test + public void testHasChildNotBeingCached() throws ElasticsearchException, IOException { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + 
client().prepareIndex("test", "parent", "p3").setSource("p_field", "p_value3").get(); + client().prepareIndex("test", "parent", "p4").setSource("p_field", "p_value4").get(); + client().prepareIndex("test", "parent", "p5").setSource("p_field", "p_value5").get(); + client().prepareIndex("test", "parent", "p6").setSource("p_field", "p_value6").get(); + client().prepareIndex("test", "parent", "p7").setSource("p_field", "p_value7").get(); + client().prepareIndex("test", "parent", "p8").setSource("p_field", "p_value8").get(); + client().prepareIndex("test", "parent", "p9").setSource("p_field", "p_value9").get(); + client().prepareIndex("test", "parent", "p10").setSource("p_field", "p_value10").get(); + client().prepareIndex("test", "child", "c1").setParent("p1").setSource("c_field", "blue").get(); + client().admin().indices().prepareFlush("test").get(); + client().admin().indices().prepareRefresh("test").get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")).cache(true))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + + client().prepareIndex("test", "child", "c2").setParent("p2").setSource("c_field", "blue").get(); + client().admin().indices().prepareRefresh("test").get(); + + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")).cache(true))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + } + + @Test + public void testDeleteByQuery_has_child() throws Exception { + assertAcked(prepareCreate("test") + .setSettings( + settingsBuilder().put(indexSettings()) + .put("index.refresh_interval", "-1") + ) + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().admin().indices().prepareFlush("test").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + client().prepareIndex("test", "parent", "p3").setSource("p_field", "p_value3").get(); + client().admin().indices().prepareFlush("test").get(); + client().prepareIndex("test", "child", "c5").setSource("c_field", "blue").setParent("p3").get(); + client().prepareIndex("test", "child", "c6").setSource("c_field", "red").setParent("p3").get(); + client().admin().indices().prepareRefresh().get(); + // p4 will not be found via search api, but will be deleted via delete_by_query api! 
+ client().prepareIndex("test", "parent", "p4").setSource("p_field", "p_value4").get(); + client().prepareIndex("test", "child", "c7").setSource("c_field", "blue").setParent("p4").get(); + client().prepareIndex("test", "child", "c8").setSource("c_field", "red").setParent("p4").get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(randomHasChild("child", "c_field", "blue")) + .get(); + assertHitCount(searchResponse, 2l); + + // Delete by query doesn't support p/c queries. If the delete by query has a different execution mode + // that doesn't rely on IW#deleteByQuery() then this test can be changed. + DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).get(); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getSuccessful(), equalTo(0)); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getFailures().length, equalTo(getNumShards("test").numPrimaries)); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getFailures()[0].reason(), containsString("[has_child] query and filter unsupported in delete_by_query api")); + client().admin().indices().prepareRefresh("test").get(); + + searchResponse = client().prepareSearch("test") + .setQuery(randomHasChild("child", "c_field", "blue")) + .get(); + assertHitCount(searchResponse, 3l); + } + + @Test + public void testDeleteByQuery_has_child_SingleRefresh() throws Exception { + assertAcked(prepareCreate("test") + .setSettings( + settingsBuilder() + .put(indexSettings()) + .put("index.refresh_interval", "-1") + ) + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().admin().indices().prepareFlush().get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + client().prepareIndex("test", "parent", "p3").setSource("p_field", "p_value3").get(); + client().prepareIndex("test", "child", "c5").setSource("c_field", "blue").setParent("p3").get(); + client().prepareIndex("test", "child", "c6").setSource("c_field", "red").setParent("p3").get(); + client().prepareIndex("test", "parent", "p4").setSource("p_field", "p_value4").get(); + client().prepareIndex("test", "child", "c7").setSource("c_field", "blue").setParent("p4").get(); + client().prepareIndex("test", "child", "c8").setSource("c_field", "red").setParent("p4").get(); + client().admin().indices().prepareRefresh().get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(randomHasChild("child", "c_field", "blue")) + .get(); + assertHitCount(searchResponse, 3l); + + DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).get(); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getSuccessful(), equalTo(0)); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getFailures().length, equalTo(getNumShards("test").numPrimaries)); + 
assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getFailures()[0].reason(), containsString("[has_child] query and filter unsupported in delete_by_query api")); + client().admin().indices().prepareRefresh("test").get(); + + searchResponse = client().prepareSearch("test") + .setQuery(randomHasChild("child", "c_field", "blue")) + .get(); + assertHitCount(searchResponse, 3l); + } + + private QueryBuilder randomHasChild(String type, String field, String value) { + if (randomBoolean()) { + if (randomBoolean()) { + return constantScoreQuery(hasChildFilter(type, termQuery(field, value))); + } else { + return filteredQuery(matchAllQuery(), hasChildFilter(type, termQuery(field, value))); + } + } else { + return hasChildQuery(type, termQuery(field, value)); + } + } + + @Test + public void testDeleteByQuery_has_parent() throws Exception { + assertAcked(prepareCreate("test") + .setSettings( + settingsBuilder() + .put(indexSettings()) + .put("index.refresh_interval", "-1") + ) + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); + client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().admin().indices().prepareFlush("test").get(); + client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); + client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get(); + client().admin().indices().prepareRefresh().get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(randomHasParent("parent", "p_field", "p_value2")) + .get(); + assertHitCount(searchResponse, 2l); + + DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test") + .setQuery(randomHasParent("parent", "p_field", "p_value2")) + .get(); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getSuccessful(), equalTo(0)); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getFailures().length, equalTo(getNumShards("test").numPrimaries)); + assertThat(deleteByQueryResponse.getIndex("test").getShardInfo().getFailures()[0].reason(), containsString("[has_parent] query and filter unsupported in delete_by_query api")); + client().admin().indices().prepareRefresh("test").get(); + client().admin().indices().prepareRefresh("test").get(); + client().admin().indices().prepareRefresh("test").get(); + + searchResponse = client().prepareSearch("test") + .setQuery(randomHasParent("parent", "p_field", "p_value2")) + .get(); + assertHitCount(searchResponse, 2l); + } + + private QueryBuilder randomHasParent(String type, String field, String value) { + if (randomBoolean()) { + if (randomBoolean()) { + return constantScoreQuery(hasParentFilter(type, termQuery(field, value))); + } else { + return filteredQuery(matchAllQuery(), hasParentFilter(type, termQuery(field, value))); + } + } else { + return hasParentQuery(type, termQuery(field, value)); + } + } + + @Test + // Relates to bug: https://github.com/elasticsearch/elasticsearch/issues/3818 + public void testHasChildQueryOnlyReturnsSingleChildType() { + assertAcked(prepareCreate("grandissue") + .addMapping("grandparent", "name", "type=string") + .addMapping("parent", "_parent", 
"type=grandparent") + .addMapping("child_type_one", "_parent", "type=parent") + .addMapping("child_type_two", "_parent", "type=parent")); + + client().prepareIndex("grandissue", "grandparent", "1").setSource("name", "Grandpa").get(); + client().prepareIndex("grandissue", "parent", "2").setParent("1").setSource("name", "Dana").get(); + client().prepareIndex("grandissue", "child_type_one", "3").setParent("2").setRouting("1") + .setSource("name", "William") + .get(); + client().prepareIndex("grandissue", "child_type_two", "4").setParent("2").setRouting("1") + .setSource("name", "Kate") + .get(); + refresh(); + + SearchResponse searchResponse = client().prepareSearch("grandissue").setQuery( + boolQuery().must( + hasChildQuery( + "parent", + boolQuery().must( + hasChildQuery( + "child_type_one", + boolQuery().must( + queryStringQuery("name:William*").analyzeWildcard(true) + ) + ) + ) + ) + ) + ).get(); + assertHitCount(searchResponse, 1l); + + searchResponse = client().prepareSearch("grandissue").setQuery( + boolQuery().must( + hasChildQuery( + "parent", + boolQuery().must( + hasChildQuery( + "child_type_two", + boolQuery().must( + queryStringQuery("name:William*").analyzeWildcard(true) + ) + ) + ) + ) + ) + ).get(); + assertHitCount(searchResponse, 0l); + } + + @Test + public void indexChildDocWithNoParentMapping() throws ElasticsearchException, IOException { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child1")); + ensureGreen(); + + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1", "_parent", "bla").get(); + try { + client().prepareIndex("test", "child1", "c1").setParent("p1").setSource("c_field", "blue").get(); + fail(); + } catch (ElasticsearchIllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Can't specify parent if no parent field has been configured")); + } + try { + client().prepareIndex("test", "child2", "c2").setParent("p1").setSource("c_field", "blue").get(); + fail(); + } catch (ElasticsearchIllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("Can't specify parent if no parent field has been configured")); + } + + refresh(); + } + + @Test + public void testAddingParentToExistingMapping() throws ElasticsearchException, IOException { + createIndex("test"); + ensureGreen(); + + PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child").setSource("number", "type=integer") + .get(); + assertThat(putMappingResponse.isAcknowledged(), equalTo(true)); + + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get(); + Map mapping = getMappingsResponse.getMappings().get("test").get("child").getSourceAsMap(); + assertThat(mapping.size(), greaterThanOrEqualTo(1)); // there are potentially some meta fields configured randomly + assertThat(mapping.get("properties"), notNullValue()); + + try { + // Adding _parent metadata field to existing mapping is prohibited: + client().admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("child") + .startObject("_parent").field("type", "parent").endObject() + .endObject().endObject()).get(); + fail(); + } catch (MergeMappingException e) { + assertThat(e.getMessage(), equalTo("Merge failed with failures {[The _parent field's type option can't be changed]}")); + } + } + + @Test + // The SimpleIdReaderTypeCache#docById method used lget, which can't be used if a map is shared. 
+ public void testTopChildrenBug_concurrencyIssue() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c1").setParent("p1").setSource("c_field", "blue").get(); + client().prepareIndex("test", "child", "c2").setParent("p1").setSource("c_field", "red").get(); + client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "red").get(); + client().admin().indices().prepareRefresh("test").get(); + + int numThreads = 10; + final CountDownLatch latch = new CountDownLatch(numThreads); + final AtomicReference holder = new AtomicReference<>(); + Runnable r = new Runnable() { + @Override + public void run() { + try { + for (int i = 0; i < 100; i++) { + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(topChildrenQuery("child", termQuery("c_field", "blue"))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + + searchResponse = client().prepareSearch("test") + .setQuery(topChildrenQuery("child", termQuery("c_field", "red"))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + } + } catch (AssertionError error) { + holder.set(error); + } finally { + latch.countDown(); + } + } + }; + + for (int i = 0; i < 10; i++) { + new Thread(r).start(); + } + latch.await(); + if (holder.get() != null) { + throw holder.get(); + } + } + + @Test + public void testHasChildQueryWithNestedInnerObjects() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent", "objects", "type=nested") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + client().prepareIndex("test", "parent", "p1") + .setSource(jsonBuilder().startObject().field("p_field", "1").startArray("objects") + .startObject().field("i_field", "1").endObject() + .startObject().field("i_field", "2").endObject() + .startObject().field("i_field", "3").endObject() + .startObject().field("i_field", "4").endObject() + .startObject().field("i_field", "5").endObject() + .startObject().field("i_field", "6").endObject() + .endArray().endObject()) + .get(); + client().prepareIndex("test", "parent", "p2") + .setSource(jsonBuilder().startObject().field("p_field", "2").startArray("objects") + .startObject().field("i_field", "1").endObject() + .startObject().field("i_field", "2").endObject() + .endArray().endObject()) + .get(); + client().prepareIndex("test", "child", "c1").setParent("p1").setSource("c_field", "blue").get(); + client().prepareIndex("test", "child", "c2").setParent("p1").setSource("c_field", "red").get(); + client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "red").get(); + refresh(); + + String scoreMode = ScoreType.values()[getRandom().nextInt(ScoreType.values().length)].name().toLowerCase(Locale.ROOT); + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(filteredQuery(QueryBuilders.hasChildQuery("child", termQuery("c_field", "blue")).scoreType(scoreMode), notFilter(termFilter("p_field", "3")))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + + searchResponse = client().prepareSearch("test") + 
.setQuery(filteredQuery(QueryBuilders.hasChildQuery("child", termQuery("c_field", "red")).scoreType(scoreMode), notFilter(termFilter("p_field", "3")))) + .get(); + assertNoFailures(searchResponse); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + } + + @Test + public void testNamedFilters() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + String parentId = "p1"; + client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get(); + client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); + refresh(); + + SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "1")).queryName("test")) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); + + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max").queryName("test")) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); + + searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score").queryName("test")) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); + + searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "1")).filterName("test"))) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); + + searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "1")).filterName("test"))) + .get(); + assertHitCount(searchResponse, 1l); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); + } + + @Test + public void testParentChildQueriesNoParentType() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(settingsBuilder() + .put(indexSettings()) + .put("index.refresh_interval", -1))); + ensureGreen(); + + String parentId = "p1"; + client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get(); + refresh(); + + try { + client().prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("c_field", "1"))) + .get(); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + } + + try { + client().prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) + .get(); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + } + + try { + client().prepareSearch("test") + .setPostFilter(hasChildFilter("child", termQuery("c_field", "1"))) + .get(); + fail(); + } catch 
(SearchPhaseExecutionException e) { + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + } + + try { + client().prepareSearch("test") + .setQuery(topChildrenQuery("child", termQuery("c_field", "1")).score("max")) + .get(); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + } + + try { + client().prepareSearch("test") + .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score")) + .get(); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + } + + try { + client().prepareSearch("test") + .setPostFilter(hasParentFilter("parent", termQuery("p_field", "1"))) + .get(); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + } + } + + @Test + public void testAdd_ParentFieldAfterIndexingParentDocButBeforeIndexingChildDoc() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(settingsBuilder() + .put(indexSettings()) + .put("index.refresh_interval", -1))); + ensureGreen(); + + String parentId = "p1"; + client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get(); + refresh(); + assertAcked(client().admin() + .indices() + .preparePutMapping("test") + .setType("child") + .setSource("_parent", "type=parent")); + client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); + client().admin().indices().prepareRefresh().get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("c_field", "1"))) + .get(); + assertHitCount(searchResponse, 1l); + assertSearchHits(searchResponse, parentId); + + searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) + .get(); + assertHitCount(searchResponse, 1l); + assertSearchHits(searchResponse, parentId); + + + searchResponse = client().prepareSearch("test") + .setPostFilter(hasChildFilter("child", termQuery("c_field", "1"))) + .get(); + assertHitCount(searchResponse, 1l); + assertSearchHits(searchResponse, parentId); + + searchResponse = client().prepareSearch("test") + .setQuery(topChildrenQuery("child", termQuery("c_field", "1")).score("max")) + .get(); + assertHitCount(searchResponse, 1l); + assertSearchHits(searchResponse, parentId); + + searchResponse = client().prepareSearch("test") + .setPostFilter(hasParentFilter("parent", termQuery("p_field", "1"))) + .get(); + assertHitCount(searchResponse, 1l); + assertSearchHits(searchResponse, "c1"); + + searchResponse = client().prepareSearch("test") + .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score")) + .get(); + assertHitCount(searchResponse, 1l); + assertSearchHits(searchResponse, "c1"); + } + + @Test + public void testParentChildCaching() throws Exception { + assertAcked(prepareCreate("test") + .setSettings( + settingsBuilder() + .put(indexSettings()) + .put("index.refresh_interval", -1) + ) + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + // index simple data + client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); + client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); + client().prepareIndex("test", "child", "c1").setParent("p1").setSource("c_field", "blue").get(); + client().prepareIndex("test", "child", "c2").setParent("p1").setSource("c_field", 
"red").get(); + client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "red").get(); + client().admin().indices().prepareOptimize("test").setMaxNumSegments(1).setFlush(true).get(); + client().prepareIndex("test", "parent", "p3").setSource("p_field", "p_value3").get(); + client().prepareIndex("test", "parent", "p4").setSource("p_field", "p_value4").get(); + client().prepareIndex("test", "child", "c4").setParent("p3").setSource("c_field", "green").get(); + client().prepareIndex("test", "child", "c5").setParent("p3").setSource("c_field", "blue").get(); + client().prepareIndex("test", "child", "c6").setParent("p4").setSource("c_field", "blue").get(); + client().admin().indices().prepareFlush("test").get(); + client().admin().indices().prepareRefresh("test").get(); + + for (int i = 0; i < 2; i++) { + SearchResponse searchResponse = client().prepareSearch() + .setQuery(filteredQuery(matchAllQuery(), boolFilter() + .must(FilterBuilders.hasChildFilter("child", matchQuery("c_field", "red"))) + .must(matchAllFilter()) + .cache(true))) + .get(); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + } + + + client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "blue").get(); + client().admin().indices().prepareRefresh("test").get(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(filteredQuery(matchAllQuery(), boolFilter() + .must(FilterBuilders.hasChildFilter("child", matchQuery("c_field", "red"))) + .must(matchAllFilter()) + .cache(true))) + .get(); + + assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); + } + + @Test + public void testParentChildQueriesViaScrollApi() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + for (int i = 0; i < 10; i++) { + client().prepareIndex("test", "parent", "p" + i).setSource("{}").get(); + client().prepareIndex("test", "child", "c" + i).setSource("{}").setParent("p" + i).get(); + } + + refresh(); + + QueryBuilder[] queries = new QueryBuilder[]{ + hasChildQuery("child", matchAllQuery()), + filteredQuery(matchAllQuery(), hasChildFilter("child", matchAllQuery())), + hasParentQuery("parent", matchAllQuery()), + filteredQuery(matchAllQuery(), hasParentFilter("parent", matchAllQuery())), + topChildrenQuery("child", matchAllQuery()).factor(10) + }; + + for (QueryBuilder query : queries) { + SearchResponse scrollResponse = client().prepareSearch("test") + .setScroll(TimeValue.timeValueSeconds(30)) + .setSize(1) + .addField("_id") + .setQuery(query) + .setSearchType("scan") + .execute() + .actionGet(); + + assertNoFailures(scrollResponse); + assertThat(scrollResponse.getHits().totalHits(), equalTo(10l)); + int scannedDocs = 0; + do { + scrollResponse = client() + .prepareSearchScroll(scrollResponse.getScrollId()) + .setScroll(TimeValue.timeValueSeconds(30)).get(); + assertThat(scrollResponse.getHits().totalHits(), equalTo(10l)); + scannedDocs += scrollResponse.getHits().getHits().length; + } while (scrollResponse.getHits().getHits().length > 0); + assertThat(scannedDocs, equalTo(10)); + } + } + + @Test + public void testValidateThatHasChildAndHasParentFilterAreNeverCached() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(builder().put(indexSettings()) + //we need 0 replicas here to make sure we always hit the very same shards + .put(SETTING_NUMBER_OF_REPLICAS, 0)) + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + 
client().prepareIndex("test", "parent", "1").setSource("field", "value") + .get(); + client().prepareIndex("test", "child", "1").setParent("1").setSource("field", "value") + .setRefresh(true) + .get(); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(hasChildQuery("child", matchAllQuery())) + .get(); + assertHitCount(searchResponse, 1l); + + searchResponse = client().prepareSearch("test") + .setQuery(hasParentQuery("parent", matchAllQuery())) + .get(); + assertHitCount(searchResponse, 1l); + + // Internally the has_child and has_parent use filter for the type field, which end up in the filter cache, + // so by first checking how much they take by executing has_child and has_parent *query* we can set a base line + // for the filter cache size in this test. + IndicesStatsResponse statsResponse = client().admin().indices().prepareStats("test").clear().setFilterCache(true).get(); + long initialCacheSize = statsResponse.getIndex("test").getTotal().getFilterCache().getMemorySizeInBytes(); + + searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.filteredQuery(matchAllQuery(), FilterBuilders.hasChildFilter("child", matchAllQuery()).cache(true))) + .get(); + assertHitCount(searchResponse, 1l); + + statsResponse = client().admin().indices().prepareStats("test").clear().setFilterCache(true).get(); + assertThat(statsResponse.getIndex("test").getTotal().getFilterCache().getMemorySizeInBytes(), equalTo(initialCacheSize)); + + searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.filteredQuery(matchAllQuery(), FilterBuilders.hasParentFilter("parent", matchAllQuery()).cache(true))) + .get(); + assertHitCount(searchResponse, 1l); + + // filter cache should not contain any thing, b/c has_child and has_parent can't be cached. + statsResponse = client().admin().indices().prepareStats("test").clear().setFilterCache(true).get(); + assertThat(statsResponse.getIndex("test").getTotal().getFilterCache().getMemorySizeInBytes(), equalTo(initialCacheSize)); + + searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.filteredQuery( + matchAllQuery(), + FilterBuilders.boolFilter().cache(true) + .must(FilterBuilders.matchAllFilter()) + .must(FilterBuilders.hasChildFilter("child", matchAllQuery()).cache(true)) + )) + .get(); + assertHitCount(searchResponse, 1l); + + searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.filteredQuery( + matchAllQuery(), + FilterBuilders.boolFilter().cache(true) + .must(FilterBuilders.matchAllFilter()) + .must(FilterBuilders.hasParentFilter("parent", matchAllQuery()).cache(true)) + )) + .get(); + assertHitCount(searchResponse, 1l); + + // filter cache should not contain any thing, b/c has_child and has_parent can't be cached. 
+ statsResponse = client().admin().indices().prepareStats("test").clear().setFilterCache(true).get(); + assertThat(statsResponse.getIndex("test").getTotal().getFilterCache().getMemorySizeInBytes(), equalTo(initialCacheSize)); + + searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.filteredQuery( + matchAllQuery(), + FilterBuilders.boolFilter().cache(true) + .must(FilterBuilders.termFilter("field", "value").cache(true)) + .must(FilterBuilders.hasChildFilter("child", matchAllQuery()).cache(true)) + )) + .get(); + assertHitCount(searchResponse, 1l); + + searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.filteredQuery( + matchAllQuery(), + FilterBuilders.boolFilter().cache(true) + .must(FilterBuilders.termFilter("field", "value").cache(true)) + .must(FilterBuilders.hasParentFilter("parent", matchAllQuery()).cache(true)) + )) + .get(); + assertHitCount(searchResponse, 1l); + + // filter cache should not contain any thing, b/c has_child and has_parent can't be cached. + statsResponse = client().admin().indices().prepareStats("test").clear().setFilterCache(true).get(); + assertThat(statsResponse.getIndex("test").getTotal().getFilterCache().getMemorySizeInBytes(), greaterThan(initialCacheSize)); + } + + // https://github.com/elasticsearch/elasticsearch/issues/5783 + @Test + public void testQueryBeforeChildType() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("features") + .addMapping("posts", "_parent", "type=features") + .addMapping("specials")); + ensureGreen(); + + client().prepareIndex("test", "features", "1").setSource("field", "foo").get(); + client().prepareIndex("test", "posts", "1").setParent("1").setSource("field", "bar").get(); + refresh(); + + SearchResponse resp; + resp = client().prepareSearch("test") + .setSource("{\"query\": {\"has_child\": {\"type\": \"posts\", \"query\": {\"match\": {\"field\": \"bar\"}}}}}").get(); + assertHitCount(resp, 1L); + + // Now reverse the order for the type after the query + resp = client().prepareSearch("test") + .setSource("{\"query\": {\"has_child\": {\"query\": {\"match\": {\"field\": \"bar\"}}, \"type\": \"posts\"}}}").get(); + assertHitCount(resp, 1L); + + } + + @Test + // https://github.com/elasticsearch/elasticsearch/issues/6256 + public void testParentFieldInMultiMatchField() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("type1") + .addMapping("type2", "_parent", "type=type1") + ); + ensureGreen(); + + client().prepareIndex("test", "type2", "1").setParent("1").setSource("field", "value").get(); + refresh(); + + SearchResponse response = client().prepareSearch("test") + .setQuery(multiMatchQuery("1", "_parent")) + .get(); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().getAt(0).id(), equalTo("1")); + } + + @Test + public void testTypeIsAppliedInHasParentInnerQuery() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "parent", "1").setSource("field1", "a")); + indexRequests.add(client().prepareIndex("test", "child", "1").setParent("1").setSource("{}")); + indexRequests.add(client().prepareIndex("test", "child", "2").setParent("1").setSource("{}")); + indexRandom(true, indexRequests); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasParentFilter("parent", 
notFilter(termFilter("field1", "a"))))) + .get(); + assertHitCount(searchResponse, 0l); + + searchResponse = client().prepareSearch("test") + .setQuery(hasParentQuery("parent", constantScoreQuery(notFilter(termFilter("field1", "a"))))) + .get(); + assertHitCount(searchResponse, 0l); + + searchResponse = client().prepareSearch("test") + .setQuery(constantScoreQuery(hasParentFilter("parent", termFilter("field1", "a")))) + .get(); + assertHitCount(searchResponse, 2l); + + searchResponse = client().prepareSearch("test") + .setQuery(hasParentQuery("parent", constantScoreQuery(termFilter("field1", "a")))) + .get(); + assertHitCount(searchResponse, 2l); + } + + private List createMinMaxDocBuilders() { + List indexBuilders = new ArrayList<>(); + // Parent 1 and its children + indexBuilders.add(client().prepareIndex().setType("parent").setId("1").setIndex("test").setSource("id",1)); + indexBuilders.add(client().prepareIndex().setType("child").setId("10").setIndex("test") + .setSource("foo", "one").setParent("1")); + + // Parent 2 and its children + indexBuilders.add(client().prepareIndex().setType("parent").setId("2").setIndex("test").setSource("id",2)); + indexBuilders.add(client().prepareIndex().setType("child").setId("11").setIndex("test") + .setSource("foo", "one").setParent("2")); + indexBuilders.add(client().prepareIndex().setType("child").setId("12").setIndex("test") + .setSource("foo", "one two").setParent("2")); + + // Parent 3 and its children + indexBuilders.add(client().prepareIndex().setType("parent").setId("3").setIndex("test").setSource("id",3)); + indexBuilders.add(client().prepareIndex().setType("child").setId("13").setIndex("test") + .setSource("foo", "one").setParent("3")); + indexBuilders.add(client().prepareIndex().setType("child").setId("14").setIndex("test") + .setSource("foo", "one two").setParent("3")); + indexBuilders.add(client().prepareIndex().setType("child").setId("15").setIndex("test") + .setSource("foo", "one two three").setParent("3")); + + // Parent 4 and its children + indexBuilders.add(client().prepareIndex().setType("parent").setId("4").setIndex("test").setSource("id",4)); + indexBuilders.add(client().prepareIndex().setType("child").setId("16").setIndex("test") + .setSource("foo", "one").setParent("4")); + indexBuilders.add(client().prepareIndex().setType("child").setId("17").setIndex("test") + .setSource("foo", "one two").setParent("4")); + indexBuilders.add(client().prepareIndex().setType("child").setId("18").setIndex("test") + .setSource("foo", "one two three").setParent("4")); + indexBuilders.add(client().prepareIndex().setType("child").setId("19").setIndex("test") + .setSource("foo", "one two three four").setParent("4")); + + return indexBuilders; + } + + private SearchResponse minMaxQuery(String scoreType, int minChildren, int maxChildren, int cutoff) throws SearchPhaseExecutionException { + return client() + .prepareSearch("test") + .setQuery( + QueryBuilders + .hasChildQuery( + "child", + QueryBuilders.functionScoreQuery(constantScoreQuery(FilterBuilders.termFilter("foo", "two"))).boostMode("replace").scoreMode("sum") + .add(FilterBuilders.matchAllFilter(), factorFunction(1)) + .add(FilterBuilders.termFilter("foo", "three"), factorFunction(1)) + .add(FilterBuilders.termFilter("foo", "four"), factorFunction(1))).scoreType(scoreType) + .minChildren(minChildren).maxChildren(maxChildren).setShortCircuitCutoff(cutoff)) + .addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get(); + } + + private SearchResponse minMaxFilter(int minChildren, int 
maxChildren, int cutoff) throws SearchPhaseExecutionException { + return client() + .prepareSearch("test") + .setQuery( + QueryBuilders.constantScoreQuery(FilterBuilders.hasChildFilter("child", termFilter("foo", "two")) + .minChildren(minChildren).maxChildren(maxChildren).setShortCircuitCutoff(cutoff))) + .addSort("id", SortOrder.ASC).setTrackScores(true).get(); + } + + @Test + public void testMinMaxChildren() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("parent", "id", "type=long") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + indexRandom(true, createMinMaxDocBuilders().toArray(new IndexRequestBuilder[0])); + SearchResponse response; + int cutoff = getRandom().nextInt(4); + + // Score mode = NONE + response = minMaxQuery("none", 0, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("none", 1, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("none", 2, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("4")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + + response = minMaxQuery("none", 3, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + + response = minMaxQuery("none", 4, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = minMaxQuery("none", 0, 4, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("none", 0, 3, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("none", 0, 2, cutoff); + + assertThat(response.getHits().totalHits(), 
equalTo(2l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + + response = minMaxQuery("none", 2, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + + try { + response = minMaxQuery("none", 3, 2, cutoff); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getMessage(), containsString("[has_child] 'max_children' is less than 'min_children'")); + } + + // Score mode = SUM + response = minMaxQuery("sum", 0, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(6f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(3f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("sum", 1, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(6f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(3f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("sum", 2, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(6f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(3f)); + + response = minMaxQuery("sum", 3, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(6f)); + + response = minMaxQuery("sum", 4, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = minMaxQuery("sum", 0, 4, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(6f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(3f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("sum", 0, 3, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(6f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(3f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("sum", 0, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + 
assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + assertThat(response.getHits().hits()[1].id(), equalTo("2")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + + response = minMaxQuery("sum", 2, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + + try { + response = minMaxQuery("sum", 3, 2, cutoff); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getMessage(), containsString("[has_child] 'max_children' is less than 'min_children'")); + } + + // Score mode = MAX + response = minMaxQuery("max", 0, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(2f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("max", 1, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(2f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("max", 2, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(2f)); + + response = minMaxQuery("max", 3, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + + response = minMaxQuery("max", 4, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = minMaxQuery("max", 0, 4, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(2f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("max", 0, 3, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(3f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(2f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("max", 0, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + 
assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + assertThat(response.getHits().hits()[1].id(), equalTo("2")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + + response = minMaxQuery("max", 2, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + + try { + response = minMaxQuery("max", 3, 2, cutoff); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getMessage(), containsString("[has_child] 'max_children' is less than 'min_children'")); + } + + // Score mode = AVG + response = minMaxQuery("avg", 0, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1.5f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("avg", 1, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1.5f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("avg", 2, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1.5f)); + + response = minMaxQuery("avg", 3, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + + response = minMaxQuery("avg", 4, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = minMaxQuery("avg", 0, 4, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1.5f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("avg", 0, 3, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(2f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1.5f)); + assertThat(response.getHits().hits()[2].id(), equalTo("2")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxQuery("avg", 0, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + 
assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(1.5f)); + assertThat(response.getHits().hits()[1].id(), equalTo("2")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + + response = minMaxQuery("avg", 2, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(1.5f)); + + try { + response = minMaxQuery("avg", 3, 2, cutoff); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getMessage(), containsString("[has_child] 'max_children' is less than 'min_children'")); + } + + // HasChildFilter + response = minMaxFilter(0, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxFilter(1, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxFilter(2, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("4")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + + response = minMaxFilter(3, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("4")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + + response = minMaxFilter(4, 0, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(0l)); + + response = minMaxFilter(0, 4, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxFilter(0, 3, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(3l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + assertThat(response.getHits().hits()[2].id(), equalTo("4")); + assertThat(response.getHits().hits()[2].score(), equalTo(1f)); + + response = minMaxFilter(0, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(2l)); + assertThat(response.getHits().hits()[0].id(), equalTo("2")); + 
assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + assertThat(response.getHits().hits()[1].id(), equalTo("3")); + assertThat(response.getHits().hits()[1].score(), equalTo(1f)); + + response = minMaxFilter(2, 2, cutoff); + + assertThat(response.getHits().totalHits(), equalTo(1l)); + assertThat(response.getHits().hits()[0].id(), equalTo("3")); + assertThat(response.getHits().hits()[0].score(), equalTo(1f)); + + try { + response = minMaxFilter(3, 2, cutoff); + fail(); + } catch (SearchPhaseExecutionException e) { + assertThat(e.getMessage(), containsString("[has_child] 'max_children' is less than 'min_children'")); + } + + } + + @Test + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/9461") + public void testParentFieldToNonExistingType() { + assertAcked(prepareCreate("test").addMapping("parent").addMapping("child", "_parent", "type=parent2")); + client().prepareIndex("test", "parent", "1").setSource("{}").get(); + client().prepareIndex("test", "child", "1").setParent("1").setSource("{}").get(); + refresh(); + + try { + client().prepareSearch("test") + .setQuery(QueryBuilders.hasChildQuery("child", matchAllQuery())) + .get(); + fail(); + } catch (SearchPhaseExecutionException e) { + } + + SearchResponse response = client().prepareSearch("test") + .setQuery(QueryBuilders.hasParentQuery("parent", matchAllQuery())) + .get(); + assertHitCount(response, 0); + + try { + client().prepareSearch("test") + .setQuery(QueryBuilders.constantScoreQuery(FilterBuilders.hasChildFilter("child", matchAllQuery()))) + .get(); + fail(); + } catch (SearchPhaseExecutionException e) { + } + + response = client().prepareSearch("test") + .setQuery(QueryBuilders.constantScoreQuery(FilterBuilders.hasParentFilter("parent", matchAllQuery()))) + .get(); + assertHitCount(response, 0); + } + + private static HasChildFilterBuilder hasChildFilter(String type, QueryBuilder queryBuilder) { + HasChildFilterBuilder hasChildFilterBuilder = FilterBuilders.hasChildFilter(type, queryBuilder); + hasChildFilterBuilder.setShortCircuitCutoff(randomInt(10)); + return hasChildFilterBuilder; + } + + private static HasChildFilterBuilder hasChildFilter(String type, FilterBuilder filterBuilder) { + HasChildFilterBuilder hasChildFilterBuilder = FilterBuilders.hasChildFilter(type, filterBuilder); + hasChildFilterBuilder.setShortCircuitCutoff(randomInt(10)); + return hasChildFilterBuilder; + } + + private static HasChildQueryBuilder hasChildQuery(String type, QueryBuilder queryBuilder) { + HasChildQueryBuilder hasChildQueryBuilder = QueryBuilders.hasChildQuery(type, queryBuilder); + hasChildQueryBuilder.setShortCircuitCutoff(randomInt(10)); + return hasChildQueryBuilder; + } + +} diff --git a/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java index e0ab41e6e867d..79cb1f9694e4a 100644 --- a/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java +++ b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java @@ -44,12 +44,7 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.FieldMapper.Loading; import org.elasticsearch.index.mapper.MergeMappingException; -import org.elasticsearch.index.query.FilterBuilder; -import org.elasticsearch.index.query.FilterBuilders; -import org.elasticsearch.index.query.HasChildFilterBuilder; -import org.elasticsearch.index.query.HasChildQueryBuilder; -import 
org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.*; import org.elasticsearch.index.search.child.ScoreType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -65,14 +60,7 @@ import org.junit.Test; import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; +import java.util.*; import static com.google.common.collect.Maps.newHashMap; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; @@ -80,42 +68,12 @@ import static org.elasticsearch.common.settings.ImmutableSettings.builder; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.FilterBuilders.boolFilter; -import static org.elasticsearch.index.query.FilterBuilders.hasParentFilter; -import static org.elasticsearch.index.query.FilterBuilders.matchAllFilter; -import static org.elasticsearch.index.query.FilterBuilders.notFilter; -import static org.elasticsearch.index.query.FilterBuilders.queryFilter; -import static org.elasticsearch.index.query.FilterBuilders.termFilter; -import static org.elasticsearch.index.query.FilterBuilders.termsFilter; -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery; -import static org.elasticsearch.index.query.QueryBuilders.idsQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; -import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; -import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.topChildrenQuery; +import static org.elasticsearch.index.query.FilterBuilders.*; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.factorFunction; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static 
org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; /** * @@ -135,8 +93,8 @@ protected Settings nodeSettings(int nodeOrdinal) { @Test public void multiLevelChild() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") - .addMapping("child", "_parent", "type=parent") + .addMapping("parent", "_parent", "parent=true") + .addMapping("child", "_parent", "type=parent,parent=true") .addMapping("grandchild", "_parent", "type=child")); ensureGreen(); @@ -190,7 +148,7 @@ public void multiLevelChild() throws Exception { // see #6722 public void test6722() throws ElasticsearchException, IOException { assertAcked(prepareCreate("test") - .addMapping("foo") + .addMapping("foo", "_parent", "parent=true") .addMapping("test", "_parent", "type=foo")); ensureGreen(); @@ -209,7 +167,7 @@ public void test6722() throws ElasticsearchException, IOException { // see #2744 public void test2744() throws ElasticsearchException, IOException { assertAcked(prepareCreate("test") - .addMapping("foo") + .addMapping("foo", "_parent", "parent=true") .addMapping("test", "_parent", "type=foo")); ensureGreen(); @@ -228,7 +186,7 @@ public void test2744() throws ElasticsearchException, IOException { @Test public void simpleChildQuery() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -266,23 +224,6 @@ public void simpleChildQuery() throws Exception { assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1")); - // TOP CHILDREN QUERY - searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))).execute() - .actionGet(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); - - searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "blue"))) - .get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); - - searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "red"))).execute() - .actionGet(); - assertHitCount(searchResponse, 2l); - assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); - assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); - // HAS CHILD searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")) .get(); @@ -317,6 +258,7 @@ public void simpleChildQuery() throws Exception { @Test @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/9270") + // TODO: this test only makes sense in the old p/c implementation public void testClearIdCacheBug() throws Exception { // enforce lazy loading to make sure that p/c stats are not counted as part of field data assertAcked(prepareCreate("test") @@ -400,7 +342,7 @@ public void testClearIdCacheBug() throws Exception { // See: https://github.com/elasticsearch/elasticsearch/issues/3290 public void 
testCachingBug_withFqueryFilter() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); List builders = new ArrayList<>(); @@ -427,10 +369,6 @@ public void testCachingBug_withFqueryFilter() throws Exception { for (int i = 1; i <= 10; i++) { logger.info("Round {}", i); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(queryFilter(topChildrenQuery("child", matchAllQuery())).cache(true))).execute() - .actionGet(); - assertNoFailures(searchResponse); - searchResponse = client().prepareSearch("test") .setQuery(constantScoreQuery(queryFilter(hasChildQuery("child", matchAllQuery()).scoreType("max")).cache(true))) .get(); assertNoFailures(searchResponse); @@ -444,7 +382,7 @@ public void testCachingBug_withFqueryFilter() throws Exception { @Test public void testHasParentFilter() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); Map> parentToChildren = newHashMap(); @@ -494,7 +432,7 @@ public void testHasParentFilter() throws Exception { @Test public void simpleChildQueryWithFlush() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -513,31 +451,8 @@ public void simpleChildQueryWithFlush() throws Exception { client().admin().indices().prepareFlush().get(); refresh(); - // TOP CHILDREN QUERY - - SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))) - .get(); - assertNoFailures(searchResponse); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); - assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); - - searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "blue"))).execute() - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); - assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2")); - - searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "red"))).execute() - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); - assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1"))); - assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1"))); - // HAS CHILD QUERY - - searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute() + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute() .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); @@ -580,7 +495,7 @@ public void simpleChildQueryWithFlush() throws Exception { @Test public void testScopedFacet() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -596,7 +511,7 @@ public void testScopedFacet() throws Exception { SearchResponse 
searchResponse = client() .prepareSearch("test") - .setQuery(topChildrenQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")))) + .setQuery(hasChildQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")))) .addAggregation(AggregationBuilders.global("global").subAggregation( AggregationBuilders.filter("filter").filter(boolFilter().should(termFilter("c_field", "red")).should(termFilter("c_field", "yellow"))).subAggregation( AggregationBuilders.terms("facet1").field("c_field")))).get(); @@ -618,7 +533,7 @@ public void testScopedFacet() throws Exception { @Test public void testDeletedParent() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); // index simple data @@ -633,7 +548,7 @@ public void testDeletedParent() throws Exception { // TOP CHILDREN QUERY - SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))) + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))) .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); @@ -652,7 +567,7 @@ public void testDeletedParent() throws Exception { client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1_updated").get(); client().admin().indices().prepareRefresh().get(); - searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))).execute() + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute() .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); @@ -670,7 +585,7 @@ public void testDeletedParent() throws Exception { @Test public void testDfsSearchType() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -692,75 +607,12 @@ public void testDfsSearchType() throws Exception { .setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryStringQuery("p_field:*"))))).execute() .actionGet(); assertNoFailures(searchResponse); - - searchResponse = client().prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(boolQuery().mustNot(topChildrenQuery("child", boolQuery().should(queryStringQuery("c_field:*"))))).execute() - .actionGet(); - assertNoFailures(searchResponse); - } - - @Test - public void testFixAOBEIfTopChildrenIsWrappedInMusNotClause() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("parent") - .addMapping("child", "_parent", "type=parent")); - ensureGreen(); - - // index simple data - client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); - client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get(); - client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get(); - client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); - client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get(); - client().prepareIndex("test", "child", "c4").setSource("c_field", 
"red").setParent("p2").get(); - - refresh(); - - SearchResponse searchResponse = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) - .setQuery(boolQuery().mustNot(topChildrenQuery("child", boolQuery().should(queryStringQuery("c_field:*"))))).execute() - .actionGet(); - assertNoFailures(searchResponse); - } - - @Test - public void testTopChildrenReSearchBug() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("parent") - .addMapping("child", "_parent", "type=parent")); - ensureGreen(); - int numberOfParents = 4; - int numberOfChildrenPerParent = 123; - for (int i = 1; i <= numberOfParents; i++) { - String parentId = String.format(Locale.ROOT, "p%d", i); - client().prepareIndex("test", "parent", parentId).setSource("p_field", String.format(Locale.ROOT, "p_value%d", i)).execute() - .actionGet(); - for (int j = 1; j <= numberOfChildrenPerParent; j++) { - client().prepareIndex("test", "child", String.format(Locale.ROOT, "%s_c%d", parentId, j)) - .setSource("c_field1", parentId, "c_field2", i % 2 == 0 ? "even" : "not_even").setParent(parentId).execute() - .actionGet(); - } - } - - refresh(); - - SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field1", "p3"))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); - assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p3")); - - searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field2", "even"))).execute() - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); - assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p4"))); - assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p4"))); } @Test public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrChildDocs() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -785,7 +637,7 @@ public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrCh @Test public void testCountApiUsage() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -794,11 +646,7 @@ public void testCountApiUsage() throws Exception { client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); refresh(); - CountResponse countResponse = client().prepareCount("test").setQuery(topChildrenQuery("child", termQuery("c_field", "1"))) - .get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) + CountResponse countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) .get(); assertHitCount(countResponse, 1l); @@ -818,7 +666,7 @@ public void testCountApiUsage() throws Exception { @Test public void testExplainUsage() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -828,13 +676,6 @@ public void testExplainUsage() throws Exception { refresh(); 
SearchResponse searchResponse = client().prepareSearch("test") - .setExplain(true) - .setQuery(topChildrenQuery("child", termQuery("c_field", "1"))) - .get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet...")); - - searchResponse = client().prepareSearch("test") .setExplain(true) .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max")) .get(); @@ -914,7 +755,7 @@ List createDocBuilders() { @Test public void testScoreForParentChildQueries_withFunctionScore() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent") .addMapping("child1", "_parent", "type=parent")); ensureGreen(); @@ -998,7 +839,7 @@ public void testScoreForParentChildQueries_withFunctionScore() throws Exception // https://github.com/elasticsearch/elasticsearch/issues/2536 public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1032,7 +873,7 @@ public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Excep @Test public void testHasChildAndHasParentFilter_withFilter() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1059,7 +900,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { @Test public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1073,10 +914,6 @@ public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { .setQuery(filteredQuery(matchAllQuery(), queryFilter(hasChildQuery("child", matchQuery("c_field", 1))))).get(); assertSearchHit(searchResponse, 1, hasId("1")); - searchResponse = client().prepareSearch("test") - .setQuery(filteredQuery(matchAllQuery(), queryFilter(topChildrenQuery("child", matchQuery("c_field", 1))))).get(); - assertSearchHit(searchResponse, 1, hasId("1")); - searchResponse = client().prepareSearch("test") .setQuery(filteredQuery(matchAllQuery(), queryFilter(hasParentQuery("parent", matchQuery("p_field", 1))))).get(); assertSearchHit(searchResponse, 1, hasId("2")); @@ -1085,10 +922,6 @@ public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { .setQuery(filteredQuery(matchAllQuery(), queryFilter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1)))))).get(); assertSearchHit(searchResponse, 1, hasId("1")); - searchResponse = client().prepareSearch("test") - .setQuery(filteredQuery(matchAllQuery(), queryFilter(boolQuery().must(topChildrenQuery("child", matchQuery("c_field", 1)))))).get(); - assertSearchHit(searchResponse, 1, hasId("1")); - searchResponse = client().prepareSearch("test") .setQuery(filteredQuery(matchAllQuery(), queryFilter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1)))))).get(); assertSearchHit(searchResponse, 1, hasId("2")); @@ -1098,7 +931,7 @@ public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { public void 
testHasChildAndHasParentWrappedInAQueryFilterShouldNeverGetCached() throws Exception { assertAcked(prepareCreate("test") .setSettings(ImmutableSettings.builder().put("index.cache.filter.type", "weighted")) - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1136,7 +969,7 @@ public void testSimpleQueryRewrite() throws Exception { .setSettings(settingsBuilder() .put(indexSettings()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(2, DEFAULT_MAX_NUM_SHARDS))) - .addMapping("parent", "p_field", "type=string") + .addMapping("parent", "_parent", "parent=true", "p_field", "type=string") .addMapping("child", "_parent", "type=parent", "c_field", "type=string")); ensureGreen(); @@ -1177,17 +1010,6 @@ public void testSimpleQueryRewrite() throws Exception { assertThat(searchResponse.getHits().hits()[2].id(), equalTo("c002")); assertThat(searchResponse.getHits().hits()[3].id(), equalTo("c003")); assertThat(searchResponse.getHits().hits()[4].id(), equalTo("c004")); - - searchResponse = client().prepareSearch("test").setSearchType(searchType) - .setQuery(topChildrenQuery("child", prefixQuery("c_field", "c")).factor(10)).addSort("p_field", SortOrder.ASC).setSize(5) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(10L)); - assertThat(searchResponse.getHits().hits()[0].id(), equalTo("p000")); - assertThat(searchResponse.getHits().hits()[1].id(), equalTo("p001")); - assertThat(searchResponse.getHits().hits()[2].id(), equalTo("p002")); - assertThat(searchResponse.getHits().hits()[3].id(), equalTo("p003")); - assertThat(searchResponse.getHits().hits()[4].id(), equalTo("p004")); } } @@ -1196,7 +1018,7 @@ public void testSimpleQueryRewrite() throws Exception { // https://github.com/elasticsearch/elasticsearch/issues/3144 public void testReIndexingParentAndChildDocuments() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1259,7 +1081,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { // https://github.com/elasticsearch/elasticsearch/issues/3203 public void testHasChildQueryWithMinimumScore() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1286,7 +1108,7 @@ public void testParentFieldFilter() throws Exception { assertAcked(prepareCreate("test") .setSettings(settingsBuilder().put(indexSettings()) .put("index.refresh_interval", -1)) - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent") .addMapping("child2", "_parent", "type=parent")); ensureGreen(); @@ -1351,7 +1173,7 @@ public void testParentFieldFilter() throws Exception { @Test public void testHasChildNotBeingCached() throws ElasticsearchException, IOException { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1393,7 +1215,7 @@ public void testDeleteByQuery_has_child() throws Exception { settingsBuilder().put(indexSettings()) .put("index.refresh_interval", "-1") ) - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1442,7 
+1264,7 @@ public void testDeleteByQuery_has_child_SingleRefresh() throws Exception { .put(indexSettings()) .put("index.refresh_interval", "-1") ) - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1499,7 +1321,7 @@ public void testDeleteByQuery_has_parent() throws Exception { .put(indexSettings()) .put("index.refresh_interval", "-1") ) - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1550,8 +1372,8 @@ private QueryBuilder randomHasParent(String type, String field, String value) { // Relates to bug: https://github.com/elasticsearch/elasticsearch/issues/3818 public void testHasChildQueryOnlyReturnsSingleChildType() { assertAcked(prepareCreate("grandissue") - .addMapping("grandparent", "name", "type=string") - .addMapping("parent", "_parent", "type=grandparent") + .addMapping("grandparent", "name", "type=string", "_parent", "parent=true") + .addMapping("parent", "_parent", "type=grandparent,parent=true") .addMapping("child_type_one", "_parent", "type=parent") .addMapping("child_type_two", "_parent", "type=parent")); @@ -1649,63 +1471,10 @@ public void testAddingParentToExistingMapping() throws ElasticsearchException, I } } - @Test - // The SimpleIdReaderTypeCache#docById method used lget, which can't be used if a map is shared. - public void testTopChildrenBug_concurrencyIssue() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("parent") - .addMapping("child", "_parent", "type=parent")); - ensureGreen(); - - // index simple data - client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get(); - client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get(); - client().prepareIndex("test", "child", "c1").setParent("p1").setSource("c_field", "blue").get(); - client().prepareIndex("test", "child", "c2").setParent("p1").setSource("c_field", "red").get(); - client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "red").get(); - client().admin().indices().prepareRefresh("test").get(); - - int numThreads = 10; - final CountDownLatch latch = new CountDownLatch(numThreads); - final AtomicReference holder = new AtomicReference<>(); - Runnable r = new Runnable() { - @Override - public void run() { - try { - for (int i = 0; i < 100; i++) { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(topChildrenQuery("child", termQuery("c_field", "blue"))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); - - searchResponse = client().prepareSearch("test") - .setQuery(topChildrenQuery("child", termQuery("c_field", "red"))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); - } - } catch (AssertionError error) { - holder.set(error); - } finally { - latch.countDown(); - } - } - }; - - for (int i = 0; i < 10; i++) { - new Thread(r).start(); - } - latch.await(); - if (holder.get() != null) { - throw holder.get(); - } - } - @Test public void testHasChildQueryWithNestedInnerObjects() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent", "objects", "type=nested") + .addMapping("parent", "_parent", "parent=true", "objects", "type=nested") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1747,7 +1516,7 @@ public void testHasChildQueryWithNestedInnerObjects() throws 
Exception { @Test public void testNamedFilters() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1756,13 +1525,7 @@ public void testNamedFilters() throws Exception { client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "1")).queryName("test")) - .get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); - - searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max").queryName("test")) + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max").queryName("test")) .get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); @@ -1826,15 +1589,6 @@ public void testParentChildQueriesNoParentType() throws Exception { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); } - try { - client().prepareSearch("test") - .setQuery(topChildrenQuery("child", termQuery("c_field", "1")).score("max")) - .get(); - fail(); - } catch (SearchPhaseExecutionException e) { - assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); - } - try { client().prepareSearch("test") .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score")) @@ -1859,7 +1613,9 @@ public void testAdd_ParentFieldAfterIndexingParentDocButBeforeIndexingChildDoc() assertAcked(prepareCreate("test") .setSettings(settingsBuilder() .put(indexSettings()) - .put("index.refresh_interval", -1))); + .put("index.refresh_interval", -1)) + .addMapping("parent", "_parent", "parent=true") + ); ensureGreen(); String parentId = "p1"; @@ -1892,12 +1648,6 @@ public void testAdd_ParentFieldAfterIndexingParentDocButBeforeIndexingChildDoc() assertHitCount(searchResponse, 1l); assertSearchHits(searchResponse, parentId); - searchResponse = client().prepareSearch("test") - .setQuery(topChildrenQuery("child", termQuery("c_field", "1")).score("max")) - .get(); - assertHitCount(searchResponse, 1l); - assertSearchHits(searchResponse, parentId); - searchResponse = client().prepareSearch("test") .setPostFilter(hasParentFilter("parent", termQuery("p_field", "1"))) .get(); @@ -1919,7 +1669,7 @@ public void testParentChildCaching() throws Exception { .put(indexSettings()) .put("index.refresh_interval", -1) ) - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -1965,7 +1715,7 @@ public void testParentChildCaching() throws Exception { @Test public void testParentChildQueriesViaScrollApi() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); for (int i = 0; i < 10; i++) { @@ -1979,8 +1729,7 @@ public void testParentChildQueriesViaScrollApi() throws Exception { hasChildQuery("child", matchAllQuery()), filteredQuery(matchAllQuery(), hasChildFilter("child", matchAllQuery())), hasParentQuery("parent", matchAllQuery()), - 
filteredQuery(matchAllQuery(), hasParentFilter("parent", matchAllQuery())), - topChildrenQuery("child", matchAllQuery()).factor(10) + filteredQuery(matchAllQuery(), hasParentFilter("parent", matchAllQuery())) }; for (QueryBuilder query : queries) { @@ -2013,6 +1762,7 @@ public void testValidateThatHasChildAndHasParentFilterAreNeverCached() throws Ex .setSettings(builder().put(indexSettings()) //we need 0 replicas here to make sure we always hit the very same shards .put(SETTING_NUMBER_OF_REPLICAS, 0)) + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -2108,7 +1858,7 @@ public void testValidateThatHasChildAndHasParentFilterAreNeverCached() throws Ex @Test public void testQueryBeforeChildType() throws Exception { assertAcked(prepareCreate("test") - .addMapping("features") + .addMapping("features", "_parent", "parent=true") .addMapping("posts", "_parent", "type=features") .addMapping("specials")); ensureGreen(); @@ -2133,7 +1883,7 @@ public void testQueryBeforeChildType() throws Exception { // https://github.com/elasticsearch/elasticsearch/issues/6256 public void testParentFieldInMultiMatchField() throws Exception { assertAcked(prepareCreate("test") - .addMapping("type1") + .addMapping("type1", "_parent", "parent=true") .addMapping("type2", "_parent", "type=type1") ); ensureGreen(); @@ -2152,7 +1902,7 @@ public void testParentFieldInMultiMatchField() throws Exception { @Test public void testTypeIsAppliedInHasParentInnerQuery() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent") + .addMapping("parent", "_parent", "parent=true") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -2247,7 +1997,7 @@ private SearchResponse minMaxFilter(int minChildren, int maxChildren, int cutoff @Test public void testMinMaxChildren() throws Exception { assertAcked(prepareCreate("test") - .addMapping("parent", "id", "type=long") + .addMapping("parent", "_parent", "parent=true", "id", "type=long") .addMapping("child", "_parent", "type=parent")); ensureGreen(); @@ -2660,7 +2410,7 @@ public void testMinMaxChildren() throws Exception { @Test @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/9461") public void testParentFieldToNonExistingType() { - assertAcked(prepareCreate("test").addMapping("parent").addMapping("child", "_parent", "type=parent2")); + assertAcked(prepareCreate("test").addMapping("parent", "_parent", "parent=true").addMapping("child", "_parent", "type=parent2")); client().prepareIndex("test", "parent", "1").setSource("{}").get(); client().prepareIndex("test", "child", "1").setParent("1").setSource("{}").get(); refresh();
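For reference, the mapping convention that the updated tests apply throughout these hunks reduces to the sketch below: the parent type is flagged as a parent in its own _parent mapping ("parent=true"), the child type points at it with "type=parent", and has_child covers the cases that the removed top_children query used to exercise. This is an illustrative fragment, not part of the diff above, and it assumes it runs inside the same ElasticsearchIntegrationTest subclass as the tests, so client(), prepareCreate(), the assertion helpers, and the static QueryBuilders imports are already in scope:

    assertAcked(prepareCreate("test")
            .addMapping("parent", "_parent", "parent=true")   // parent type marked as a parent
            .addMapping("child", "_parent", "type=parent"));  // child type points at the parent
    ensureGreen();

    client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
    client().prepareIndex("test", "child", "c1").setParent("p1").setSource("c_field", "red").get();
    refresh();

    // has_child replaces the removed top_children query; score_type controls how child
    // scores are folded into the parent hit ("none", "sum", "max", "avg").
    SearchResponse searchResponse = client().prepareSearch("test")
            .setQuery(hasChildQuery("child", termQuery("c_field", "red")).scoreType("max"))
            .get();
    assertHitCount(searchResponse, 1l);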
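The long min_children/max_children sequence above boils down to three rules: a parent is a hit only when the number of its matching children lies within [min_children, max_children] (0 meaning unbounded, and at least one matching child is still required), the parent score is derived from the child scores according to score_type while the filter form always scores 1.0, and min_children greater than max_children is rejected during the search phase. A condensed sketch against the same createMinMaxDocBuilders() data, under the same assumptions as the fragment above (the short-circuit cutoff is randomized exactly as in the tests and does not change the results):

    int cutoff = getRandom().nextInt(4);

    // Children matching "two": parent "2" has one, parent "3" has two, parent "4" has three.
    SearchResponse response = client().prepareSearch("test")
            .setQuery(hasChildQuery("child", termQuery("foo", "two"))
                    .scoreType("sum").minChildren(2).maxChildren(0).setShortCircuitCutoff(cutoff))
            .get();
    assertHitCount(response, 2l); // parents "3" and "4"; parent "2" is dropped by min_children

    // Filter variant with an upper bound; scores are constant.
    response = client().prepareSearch("test")
            .setQuery(constantScoreQuery(FilterBuilders.hasChildFilter("child", termFilter("foo", "two"))
                    .minChildren(0).maxChildren(2).setShortCircuitCutoff(cutoff)))
            .get();
    assertHitCount(response, 2l); // parents "2" and "3"; parent "4" exceeds max_children

    // Inverted bounds fail the request.
    try {
        client().prepareSearch("test")
                .setQuery(hasChildQuery("child", termQuery("foo", "two")).minChildren(3).maxChildren(2))
                .get();
        fail();
    } catch (SearchPhaseExecutionException e) {
        assertThat(e.getMessage(), containsString("[has_child] 'max_children' is less than 'min_children'"));
    }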