Introduce new setting search.concurrent.max_slice to control the slice computation for concurrent segment search. It uses the Lucene default mechanism if the setting value is <= 0; otherwise it uses the custom max target slice mechanism.

Signed-off-by: Sorabh Hamirwasia <[email protected]>
Showing 11 changed files with 330 additions and 3 deletions.
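The setting is registered with Setting.Property.NodeScope, so it is fixed per node at startup. As a minimal, hypothetical sketch (the key name comes from the commit message; the value 4 is just an example), a node or test could opt in to the custom mechanism like this, while any value <= 0 keeps Lucene's default slicing:

import org.opensearch.common.settings.Settings;

// Hypothetical configuration sketch: a positive value opts in to the custom
// max-target-slice computation; the default of -1 keeps Lucene's behavior.
Settings nodeSettings = Settings.builder()
    .put("search.concurrent.max_slice", 4) // cap each search request at 4 slices
    .build();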
server/src/main/java/org/opensearch/search/SearchBootstrapSettings.java (38 additions, 0 deletions)
@@ -0,0 +1,38 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.search;

import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;

/**
 * Keeps track of all the search-related node-level settings which can be accessed via static methods
 */
public class SearchBootstrapSettings {
    // Setting to configure the maximum slice count created per search request using the OpenSearch custom slice
    // computation mechanism. The default Lucene mechanism is not used if this setting has a value > 0.
    public static final String CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY = "search.concurrent.max_slice";
    public static final int CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE = -1;

    // value <= 0 means the Lucene slice computation will be used
    public static final Setting<Integer> CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING = Setting.intSetting(
        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY,
        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE,
        Setting.Property.NodeScope
    );

    private static Settings settings;

    public static void initialize(Settings openSearchSettings) {
        settings = openSearchSettings;
    }

    public static int getValueAsInt(String settingName, int defaultValue) {
        return (settings != null) ? settings.getAsInt(settingName, defaultValue) : defaultValue;
    }
}
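Because the class exposes static accessors, a consumer can read the configured value without plumbing Settings through every call site. A hypothetical dispatch sketch of the behavior the commit message describes (computeSlices is an invented name, and the commit's actual searcher wiring is not part of this excerpt; the thresholds mirror Lucene's default-style slicing limits):

// Hypothetical sketch: value <= 0 defers to Lucene's static slicing helper;
// value > 0 uses the custom supplier shown in the next file.
protected IndexSearcher.LeafSlice[] computeSlices(List<LeafReaderContext> leaves) {
    int targetMaxSlice = SearchBootstrapSettings.getValueAsInt(
        SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY,
        SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE
    );
    return (targetMaxSlice <= 0)
        ? IndexSearcher.slices(leaves, 250_000, 5) // Lucene default-style slicing
        : MaxTargetSliceSupplier.getSlices(leaves, targetMaxSlice);
}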
server/src/main/java/org/opensearch/search/internal/MaxTargetSliceSupplier.java (61 additions, 0 deletions)
@@ -0,0 +1,61 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.search.internal;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Supplier to compute leaf slices from the passed-in leaves, with a max target slice count to limit the number of
 * computed slices. It sorts all the leaves by document count and then assigns each leaf to a slice in round-robin
 * fashion. Based on the experiment results shared in
 * <a href=https://github.com/opensearch-project/OpenSearch/issues/7358>issue-7358</a>, this mechanism achieves
 * better tail/median latency than the default Lucene slice computation.
 */
public class MaxTargetSliceSupplier {

    public static IndexSearcher.LeafSlice[] getSlices(List<LeafReaderContext> leaves, int targetMaxSlice) {
        if (targetMaxSlice <= 0) {
            throw new IllegalArgumentException("MaxTargetSliceSupplier called with unexpected slice count of " + targetMaxSlice);
        }

        // slice count should not exceed the segment count
        int targetSliceCount = Math.min(targetMaxSlice, leaves.size());

        // Make a copy so we can sort:
        List<LeafReaderContext> sortedLeaves = new ArrayList<>(leaves);

        // Sort by maxDoc, descending:
        sortedLeaves.sort(Collections.reverseOrder(Comparator.comparingInt(l -> l.reader().maxDoc())));

        final List<List<LeafReaderContext>> groupedLeaves = new ArrayList<>();
        for (int i = 0; i < targetSliceCount; ++i) {
            groupedLeaves.add(new ArrayList<>());
        }
        // distribute the leaves among the groups in round-robin fashion
        for (int idx = 0; idx < sortedLeaves.size(); ++idx) {
            int currentGroup = idx % targetSliceCount;
            groupedLeaves.get(currentGroup).add(sortedLeaves.get(idx));
        }

        IndexSearcher.LeafSlice[] slices = new IndexSearcher.LeafSlice[targetSliceCount];
        int upto = 0;
        for (List<LeafReaderContext> currentGroup : groupedLeaves) {
            slices[upto] = new IndexSearcher.LeafSlice(currentGroup);
            ++upto;
        }
        return slices;
    }
}
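To make the round-robin assignment concrete: given five segments with 50, 40, 30, 20, and 10 documents and a target of two slices, the descending sort plus round-robin assignment produces one slice holding the 50-, 30-, and 10-doc leaves and another holding the 40- and 20-doc leaves, so the heaviest segments are spread across slices. A hypothetical JUnit-style fragment exercising this with the single-document-per-leaf test helper added later in this commit (field access on LeafSlice assumes the Lucene 9 public leaves array):

// Hypothetical test sketch: 5 single-document leaves, capped at 2 slices.
List<LeafReaderContext> leaves = IndexReaderUtils.getLeaves(5);
IndexSearcher.LeafSlice[] slices = MaxTargetSliceSupplier.getSlices(leaves, 2);
assertEquals(2, slices.length);           // target slice count honored
assertEquals(3, slices[0].leaves.length); // leaves at indices 0, 2, 4
assertEquals(2, slices[1].leaves.length); // leaves at indices 1, 3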
server/src/test/java/org/opensearch/search/internal/IndexReaderUtils.java (51 additions, 0 deletions)
@@ -0,0 +1,51 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.search.internal;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.store.Directory;

import java.util.List;

import static org.apache.lucene.tests.util.LuceneTestCase.newDirectory;

public class IndexReaderUtils {

    /**
     * Utility to create leafCount number of {@link LeafReaderContext}. Committing after every document with
     * {@link NoMergePolicy} guarantees one single-document segment, and hence one leaf, per iteration.
     * Note that the reader and directory are closed before returning, so the returned contexts should only be
     * used by tests that access per-leaf metadata such as {@code maxDoc()}.
     * @param leafCount count of leaves to create
     * @return created leaves
     */
    public static List<LeafReaderContext> getLeaves(int leafCount) throws Exception {
        final Directory directory = newDirectory();
        IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE));
        for (int i = 0; i < leafCount; ++i) {
            Document document = new Document();
            final String fieldValue = "value" + i;
            document.add(new StringField("field1", fieldValue, Field.Store.NO));
            document.add(new StringField("field2", fieldValue, Field.Store.NO));
            iw.addDocument(document);
            iw.commit();
        }
        iw.close();
        DirectoryReader directoryReader = DirectoryReader.open(directory);
        List<LeafReaderContext> leaves = directoryReader.leaves();
        directoryReader.close();
        directory.close();
        return leaves;
    }
}
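A related property worth noting is the Math.min cap in MaxTargetSliceSupplier: requesting more slices than there are leaves simply yields one single-leaf slice per leaf. A hypothetical JUnit-style fragment using the helper above:

// Hypothetical sketch: 3 single-document leaves but a target of 10 slices.
List<LeafReaderContext> leaves = IndexReaderUtils.getLeaves(3);
IndexSearcher.LeafSlice[] slices = MaxTargetSliceSupplier.getSlices(leaves, 10);
assertEquals(3, slices.length); // capped at the leaf count, one leaf per slice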