SOLR-17022: Support for glob patterns for fields in Export handler, Stream handler and with SelectStream streaming expression #1996

Merged · 14 commits · Dec 22, 2023
77 changes: 43 additions & 34 deletions solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
@@ -27,6 +27,7 @@
import java.io.PrintWriter;
import java.lang.invoke.MethodHandles;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
@@ -76,6 +77,7 @@
import org.apache.solr.schema.StrField;
import org.apache.solr.search.DocValuesIteratorCache;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SolrReturnFields;
import org.apache.solr.search.SortSpec;
import org.apache.solr.search.SyntaxError;
import org.slf4j.Logger;
@@ -121,7 +123,7 @@ public boolean write(
private int priorityQueueSize;
StreamExpression streamExpression;
StreamContext streamContext;
FieldWriter[] fieldWriters;
List<FieldWriter> fieldWriters;
int totalHits = 0;
FixedBitSet[] sets = null;
PushWriter writer;
@@ -293,7 +295,7 @@ private void _write(OutputStream os) throws IOException {
}

try {
fieldWriters = getFieldWriters(fields, req.getSearcher());
fieldWriters = getFieldWriters(fields, req);
} catch (Exception e) {
writeException(e, writer, true);
return;
@@ -473,7 +475,7 @@ void fillOutDocs(MergeIterator mergeIterator, ExportBuffers.Buffer buffer) throw
}

void writeDoc(
SortDoc sortDoc, List<LeafReaderContext> leaves, EntryWriter ew, FieldWriter[] writers)
SortDoc sortDoc, List<LeafReaderContext> leaves, EntryWriter ew, List<FieldWriter> writers)
throws IOException {
int ord = sortDoc.ord;
LeafReaderContext context = leaves.get(ord);
@@ -485,82 +487,89 @@ void writeDoc(
}
}

public FieldWriter[] getFieldWriters(String[] fields, SolrIndexSearcher searcher)
public List<FieldWriter> getFieldWriters(String[] fields, SolrQueryRequest req)
throws IOException {
IndexSchema schema = searcher.getSchema();
FieldWriter[] writers = new FieldWriter[fields.length];
DocValuesIteratorCache dvIterCache = new DocValuesIteratorCache(searcher, false);
for (int i = 0; i < fields.length; i++) {
String field = fields[i];
SchemaField schemaField = null;
DocValuesIteratorCache dvIterCache = new DocValuesIteratorCache(req.getSearcher(), false);

try {
schemaField = schema.getField(field);
} catch (Exception e) {
throw new IOException(e);
}
SolrReturnFields solrReturnFields = new SolrReturnFields(fields, req);

List<FieldWriter> writers = new ArrayList<>();
for (String field : req.getSearcher().getFieldNames()) {
if (!solrReturnFields.wantsField(field)) {
continue;
}
SchemaField schemaField = req.getSchema().getField(field);
if (!schemaField.hasDocValues()) {
throw new IOException(schemaField + " must have DocValues to use this feature.");
}
boolean multiValued = schemaField.multiValued();
FieldType fieldType = schemaField.getType();

if (fieldType instanceof SortableTextField && schemaField.useDocValuesAsStored() == false) {
throw new IOException(
schemaField + " Must have useDocValuesAsStored='true' to be used with export writer");
FieldWriter writer;

if (fieldType instanceof SortableTextField && !schemaField.useDocValuesAsStored()) {
if (solrReturnFields.getRequestedFieldNames() != null
&& solrReturnFields.getRequestedFieldNames().contains(field)) {
// Explicitly requested field cannot be used due to not having useDocValuesAsStored=true,
// throw exception
throw new IOException(
schemaField + " Must have useDocValuesAsStored='true' to be used with export writer");
} else {
// Glob pattern matched field cannot be used due to not having useDocValuesAsStored=true
continue;
}
}

DocValuesIteratorCache.FieldDocValuesSupplier docValuesCache = dvIterCache.getSupplier(field);

if (docValuesCache == null) {
writers[i] = EMPTY_FIELD_WRITER;
writer = EMPTY_FIELD_WRITER;
} else if (fieldType instanceof IntValueFieldType) {
if (multiValued) {
writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
writer = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
} else {
writers[i] = new IntFieldWriter(field, docValuesCache);
writer = new IntFieldWriter(field, docValuesCache);
}
} else if (fieldType instanceof LongValueFieldType) {
if (multiValued) {
writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
writer = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
} else {
writers[i] = new LongFieldWriter(field, docValuesCache);
writer = new LongFieldWriter(field, docValuesCache);
}
} else if (fieldType instanceof FloatValueFieldType) {
if (multiValued) {
writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
writer = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
} else {
writers[i] = new FloatFieldWriter(field, docValuesCache);
writer = new FloatFieldWriter(field, docValuesCache);
}
} else if (fieldType instanceof DoubleValueFieldType) {
if (multiValued) {
writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
writer = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
} else {
writers[i] = new DoubleFieldWriter(field, docValuesCache);
writer = new DoubleFieldWriter(field, docValuesCache);
}
} else if (fieldType instanceof StrField || fieldType instanceof SortableTextField) {
if (multiValued) {
writers[i] = new MultiFieldWriter(field, fieldType, schemaField, false, docValuesCache);
writer = new MultiFieldWriter(field, fieldType, schemaField, false, docValuesCache);
} else {
writers[i] = new StringFieldWriter(field, fieldType, docValuesCache);
writer = new StringFieldWriter(field, fieldType, docValuesCache);
}
} else if (fieldType instanceof DateValueFieldType) {
if (multiValued) {
writers[i] = new MultiFieldWriter(field, fieldType, schemaField, false, docValuesCache);
writer = new MultiFieldWriter(field, fieldType, schemaField, false, docValuesCache);
} else {
writers[i] = new DateFieldWriter(field, docValuesCache);
writer = new DateFieldWriter(field, docValuesCache);
}
} else if (fieldType instanceof BoolField) {
if (multiValued) {
writers[i] = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
writer = new MultiFieldWriter(field, fieldType, schemaField, true, docValuesCache);
} else {
writers[i] = new BoolFieldWriter(field, fieldType, docValuesCache);
writer = new BoolFieldWriter(field, fieldType, docValuesCache);
}
} else {
throw new IOException(
"Export fields must be one of the following types: int,float,long,double,string,date,boolean,SortableText");
}
writers.add(writer);
}
return writers;
}
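[Editor's note] The net effect of the new getFieldWriters() flow above is that literal field names are kept as-is while glob patterns are expanded against the fields the searcher actually has. A minimal standalone sketch of that idea, using the GlobPatternUtil class introduced elsewhere in this PR; the field names and the class name FlGlobExpansionSketch are made up for illustration, and the real code goes through SolrReturnFields.wantsField() and additionally enforces the docValues requirements shown above:

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.common.util.GlobPatternUtil;

// Rough approximation of the fl expansion: keep a field if it is requested
// literally or if it matches any requested glob pattern.
public class FlGlobExpansionSketch {
  public static void main(String[] args) {
    List<String> indexedFields = List.of("id", "price_i", "qty_i", "name_s"); // hypothetical schema fields
    List<String> requested = List.of("id", "*_i"); // e.g. fl=id,*_i

    List<String> expanded = new ArrayList<>();
    for (String field : indexedFields) {
      for (String spec : requested) {
        boolean matches =
            spec.contains("*") ? GlobPatternUtil.matches(spec, field) : spec.equals(field);
        if (matches) {
          expanded.add(field);
          break;
        }
      }
    }
    System.out.println(expanded); // prints [id, price_i, qty_i]
  }
}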
@@ -28,7 +28,6 @@
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import org.apache.commons.io.FilenameUtils;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.QueryValueSource;
@@ -37,6 +36,7 @@
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.GlobPatternUtil;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.transform.DocTransformer;
import org.apache.solr.response.transform.DocTransformers;
@@ -577,8 +577,7 @@ public boolean wantsField(String name) {
return true;
}
for (String s : globs) {
// TODO something better?
if (FilenameUtils.wildcardMatch(name, s)) {
if (GlobPatternUtil.matches(s, name)) {
okFieldNames.add(name); // Don't calculate it again
return true;
}
@@ -1298,6 +1298,43 @@ public void testExpr() throws Exception {
.contains("Must have useDocValuesAsStored='true'"));
}

@Test
public void testGlobFields() throws Exception {
assertU(delQ("*:*"));
assertU(commit());
createLargeIndex();
SolrQueryRequest req =
req("q", "*:*", "qt", "/export", "fl", "id,*_udvas,*_i_p", "sort", "id asc");
assertJQ(
req,
"response/numFound==100000",
"response/docs/[0]/id=='0'",
"response/docs/[1]/id=='1'",
"response/docs/[0]/sortabledv_udvas=='0'",
"response/docs/[1]/sortabledv_udvas=='1'",
"response/docs/[0]/small_i_p==0",
"response/docs/[1]/small_i_p==1");

assertU(delQ("*:*"));
assertU(commit());
createLargeIndex();
req = req("q", "*:*", "qt", "/export", "fl", "*", "sort", "id asc");
assertJQ(
req,
"response/numFound==100000",
"response/docs/[0]/id=='0'",
"response/docs/[1]/id=='1'",
"response/docs/[0]/sortabledv_udvas=='0'",
"response/docs/[1]/sortabledv_udvas=='1'",
"response/docs/[0]/small_i_p==0",
"response/docs/[1]/small_i_p==1");

String jq = JQ(req);
assertFalse(
"Fields without docvalues and useDocValuesAsStored should not be returned",
jq.contains("\"sortabledv\""));
}

@SuppressWarnings("rawtypes")
private void validateSort(int numDocs) throws Exception {
// 10 fields
@@ -70,7 +70,10 @@ It can get worse otherwise.
The `fl` property defines the fields that will be exported with the result set.
Any of the field types that can be sorted (i.e., int, long, float, double, string, date, boolean) can be used in the field list.
The fields can be single or multi-valued.
However, returning scores and wildcards are not supported at this time.

Wildcard patterns can be used in the field list (e.g., `fl=*_i`); each pattern is expanded to the list of matching fields that are able to be exported. See <<Field Requirements>>.

Returning scores is not supported at this time.
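
[Editor's note] For illustration only (the collection and field names below are hypothetical, not taken from this PR), an export request combining a literal field with a glob pattern could look like:

http://localhost:8983/solr/mycollection/export?q=*:*&fl=id,*_i&sort=id asc

Per the handler changes above, a field that matches a glob but cannot be exported (for example a SortableTextField without useDocValuesAsStored=true) is silently skipped, whereas naming such a field explicitly in `fl` still raises an error.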

=== Specifying the Local Streaming Expression

@@ -1375,7 +1375,7 @@ One can provide a list of operations and evaluators to perform on any fields, su
=== select Parameters

* `StreamExpression`
* `fieldName`: name of field to include in the output tuple (can include multiple of these), such as `outputTuple[fieldName] = inputTuple[fieldName]`
* `fieldName`: name of field to include in the output tuple (can include multiple of these), such as `outputTuple[fieldName] = inputTuple[fieldName]`. The `fieldName` can be a wildcard pattern, e.g. `a_*` to select all fields that start with `a_`.
* `fieldName as aliasFieldName`: aliased field name to include in the output tuple (can include multiple of these), such as `outputTuple[aliasFieldName] = incomingTuple[fieldName]`
* `replace(fieldName, value, withValue=replacementValue)`: if `incomingTuple[fieldName] == value` then `outgoingTuple[fieldName]` will be set to `replacementValue`.
`value` can be the string "null" to replace a null value with some other value.
@@ -38,6 +38,7 @@
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.common.util.GlobPatternUtil;

/**
* Selects fields from the incoming stream and applies optional field renaming. Does not reorder the
@@ -52,14 +53,21 @@ public class SelectStream extends TupleStream implements Expressible {
private TupleStream stream;
private StreamContext streamContext;
private Map<String, String> selectedFields;
private List<String> selectedFieldGlobPatterns;
private Map<StreamEvaluator, String> selectedEvaluators;
private List<StreamOperation> operations;

public SelectStream(TupleStream stream, List<String> selectedFields) throws IOException {
this.stream = stream;
this.selectedFields = new HashMap<>();
this.selectedFieldGlobPatterns = new ArrayList<>();
for (String selectedField : selectedFields) {
this.selectedFields.put(selectedField, selectedField);
if (selectedField.contains("*")) {
// selected field is a glob pattern
this.selectedFieldGlobPatterns.add(selectedField);
} else {
this.selectedFields.put(selectedField, selectedField);
}
}
operations = new ArrayList<>();
selectedEvaluators = new LinkedHashMap<>();
@@ -68,6 +76,7 @@ public SelectStream(TupleStream stream, Map<String, String> selectedFields) throws IOEx
public SelectStream(TupleStream stream, Map<String, String> selectedFields) throws IOException {
this.stream = stream;
this.selectedFields = selectedFields;
selectedFieldGlobPatterns = new ArrayList<>();
operations = new ArrayList<>();
selectedEvaluators = new LinkedHashMap<>();
}
@@ -123,6 +132,7 @@ public SelectStream(StreamExpression expression, StreamFactory factory) throws I
stream = factory.constructStream(streamExpressions.get(0));

selectedFields = new HashMap<>();
selectedFieldGlobPatterns = new ArrayList<>();
selectedEvaluators = new LinkedHashMap<>();
for (StreamExpressionParameter parameter : selectAsFieldsExpressions) {
StreamExpressionValue selectField = (StreamExpressionValue) parameter;
@@ -175,7 +185,11 @@ public SelectStream(StreamExpression expression, StreamFactory factory) throws I
selectedFields.put(asValue, asName);
}
} else {
selectedFields.put(value, value);
if (value.contains("*")) {
selectedFieldGlobPatterns.add(value);
} else {
selectedFields.put(value, value);
}
}
}

@@ -217,6 +231,11 @@ private StreamExpression toExpression(StreamFactory factory, boolean includeStre
}
}

// selected glob patterns
for (String selectFieldGlobPattern : selectedFieldGlobPatterns) {
expression.addParameter(selectFieldGlobPattern);
}

// selected evaluators
for (Map.Entry<StreamEvaluator, String> selectedEvaluator : selectedEvaluators.entrySet()) {
expression.addParameter(
@@ -308,6 +327,13 @@ public Tuple read() throws IOException {
workingForEvaluators.put(fieldName, original.get(fieldName));
if (selectedFields.containsKey(fieldName)) {
workingToReturn.put(selectedFields.get(fieldName), original.get(fieldName));
} else {
for (String globPattern : selectedFieldGlobPatterns) {
if (GlobPatternUtil.matches(globPattern, fieldName)) {
[Review comment — Contributor] Shouldn't SelectStream also use SolrReturnFields and not use the lower-level GlobPattern stuff (it's something SRF can handle)? Disclaimer: I haven't looked at this PR in a long time.

[Contributor, PR author] solrj-streaming does not currently have a dependency on core; in fact, I think core currently depends on solrj-streaming. I didn't want to refactor SolrReturnFields to live elsewhere given the scope of this PR, so I'm not using it here.

[Contributor] Oh right; of course.

workingToReturn.put(fieldName, original.get(fieldName));
break;
}
}
}
}

@@ -105,14 +105,15 @@ public void testSelectStream() throws Exception {
try (SelectStream stream =
new SelectStream(
StreamExpressionParser.parse(
"select(\"a_s as fieldA\", search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))"),
"select(\"a_s as fieldA\", a_*, search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))"),
factory)) {
expressionString = stream.toExpression(factory).toString();
assertTrue(expressionString.contains("select(search(collection1,"));
assertTrue(expressionString.contains("q=\"*:*\""));
assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
assertTrue(expressionString.contains("sort=\"a_f asc, a_i asc\""));
assertTrue(expressionString.contains("a_s as fieldA"));
assertTrue(expressionString.contains("a_*"));
}
}
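
[Editor's note] Pulling the pieces together, the expression exercised by this test (the collection and field names are the test fixture's, shown here purely as a usage sketch) is:

select("a_s as fieldA", a_*, search(collection1, q=*:*, fl="id,a_s,a_i,a_f", sort="a_f asc, a_i asc"))

Here a_s is emitted under the alias fieldA, while a_i and a_f keep their own names by matching the a_* glob; per the read() logic above, an explicit alias takes precedence over a glob match for the same incoming field.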

@@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.common.util;

import java.nio.file.FileSystems;
import java.nio.file.Paths;

/** Provides methods for matching glob patterns against input strings. */
public class GlobPatternUtil {

/**
* Matches an input string against a provided glob pattern. This uses Java NIO's FileSystems
* PathMatcher to match glob patterns in the same way that glob patterns are matched for file
* paths, rather than implementing our own glob pattern matching.
*
* @param pattern the glob pattern to match against
* @param input the input string to match against a glob pattern
* @return true if the input string matches the glob pattern, false otherwise
*/
public static boolean matches(String pattern, String input) {
return FileSystems.getDefault().getPathMatcher("glob:" + pattern).matches(Paths.get(input));
[Review comment — Contributor @dsmiley, Oct 13, 2023] This is bizarre just for some field wildcard support. If there is a reason we use FileSystems then a comment is necessary.

[Contributor] I took a look at how it's implemented. If only we could call ZipUtils.toRegexPattern, but the class is package protected. It's a shame to recompile the glob on each call to matches!
}
}
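
[Editor's note] Two asides on the class above. Matching itself is straightforward, e.g. GlobPatternUtil.matches("*_i", "price_i") is true. And regarding the reviewer's point about recompiling the glob on every call, one possible mitigation — purely an illustrative sketch, not part of this PR, with the class name CachingGlobMatcher made up — is to cache compiled PathMatcher instances:

import java.nio.file.FileSystems;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative sketch only: caches one PathMatcher per glob pattern so each
// pattern is compiled once instead of on every matches() call.
// A real implementation would likely bound or evict entries from the cache.
public class CachingGlobMatcher {
  private static final Map<String, PathMatcher> CACHE = new ConcurrentHashMap<>();

  public static boolean matches(String pattern, String input) {
    PathMatcher matcher =
        CACHE.computeIfAbsent(pattern, p -> FileSystems.getDefault().getPathMatcher("glob:" + p));
    return matcher.matches(Paths.get(input));
  }

  public static void main(String[] args) {
    System.out.println(matches("*_i", "price_i")); // true
    System.out.println(matches("*_i", "name_s")); // false
  }
}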