Merge branch 'master' into fix-unreleased-versions
* master:
  Enhances exists queries to reduce need for `_field_names` (elastic#26930)
  Added new terms_set query
  Set request body to required to reflect the code base (elastic#27188)
  Update Docker docs for 6.0.0-rc2 (elastic#27166)
  Add version 6.0.0
  Docs: restore now fails if it encounters incompatible settings (elastic#26933)
  Convert index blocks to cluster block exceptions (elastic#27050)
  [DOCS] Link remote info API in Cross Cluster Search docs page
  Fix Laplace scorer to multiply by alpha (and not add) (elastic#27125)
  [DOCS] Clarify migrate guide and search request validation
  Raise IllegalArgumentException if query validation failed (elastic#26811)
  prevent duplicate fields when mixing parent and root nested includes (elastic#27072)
  TopHitsAggregator must propagate calls to `setScorer`. (elastic#27138)
jasontedor committed Nov 1, 2017
2 parents d0bb219 + 99aca9c commit 23dc54b
Showing 81 changed files with 3,218 additions and 290 deletions.
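
The headline change in this merge is elastic#26930, which lets each field mapper answer exists queries from doc values when a field has them, falling back to the `_field_names` metadata field only when it does not. The following is a minimal editorial sketch of the two query shapes against plain Lucene 7.x; it is not code from this commit, and the field name is an illustrative assumption.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class ExistsQueryShapes {
    public static void main(String[] args) {
        String field = "user_id"; // hypothetical field name, for illustration only
        // Preferred shape when the field has doc values: answered from doc values,
        // with no _field_names entry required at index time.
        Query viaDocValues = new DocValuesFieldExistsQuery(field);
        // Fallback shape when the field has no doc values: a term lookup against
        // the _field_names metadata field.
        Query viaFieldNames = new TermQuery(new Term("_field_names", field));
        System.out.println(viaDocValues);
        System.out.println(viaFieldNames);
    }
}
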
5 changes: 5 additions & 0 deletions core/src/main/java/org/elasticsearch/Version.java
@@ -118,6 +118,9 @@ public class Version implements Comparable<Version> {
public static final int V_6_0_0_rc2_ID = 6000052;
public static final Version V_6_0_0_rc2 =
new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final int V_6_0_0_ID = 6000099;
public static final Version V_6_0_0 =
new Version(V_6_0_0_ID, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final int V_6_1_0_ID = 6010099;
public static final Version V_6_1_0 =
new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
@@ -143,6 +146,8 @@ public static Version fromId(int id) {
return V_7_0_0_alpha1;
case V_6_1_0_ID:
return V_6_1_0;
case V_6_0_0_ID:
return V_6_0_0;
case V_6_0_0_rc2_ID:
return V_6_0_0_rc2;
case V_6_0_0_beta2_ID:
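
As a side note, the numeric version IDs above follow the pattern the surrounding constants imply: major * 1,000,000 + minor * 10,000 + revision * 100 + build, with build 99 marking a GA release (hence 6000099 for 6.0.0 next to 6000052 for 6.0.0-rc2). A small editorial sketch of that assumed scheme, not code from the commit:

public class VersionIdSketch {
    // Decodes an ID under the assumed scheme described above.
    static String describe(int id) {
        int major = (id / 1_000_000) % 100;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        return major + "." + minor + "." + revision + (build == 99 ? "" : " (pre-release build " + build + ")");
    }

    public static void main(String[] args) {
        System.out.println(describe(6000099)); // 6.0.0 -- the newly added V_6_0_0_ID
        System.out.println(describe(6000052)); // 6.0.0 (pre-release build 52), i.e. 6.0.0-rc2
        System.out.println(describe(6010099)); // 6.1.0
    }
}
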
@@ -62,7 +62,7 @@ protected DeleteIndexTemplateResponse newResponse() {

@Override
protected ClusterBlockException checkBlock(DeleteIndexTemplateRequest request, ClusterState state) {
-        return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA_WRITE, "");
+        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}

@Override
@@ -66,7 +66,7 @@ protected PutIndexTemplateResponse newResponse() {

@Override
protected ClusterBlockException checkBlock(PutIndexTemplateRequest request, ClusterState state) {
-        return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA_WRITE, "");
+        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}

@Override
@@ -20,10 +20,14 @@
package org.elasticsearch.index.mapper;

import com.carrotsearch.hppc.ObjectArrayList;

import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
@@ -126,6 +130,15 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
return new BytesBinaryDVIndexFieldData.Builder();
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Binary fields do not support searching");
@@ -165,6 +178,11 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
} else {
field.add(value);
}
} else {
// Only add an entry to the field names field if the field is stored
// but has no doc values, so that exists queries still work on a field
// without doc values
createFieldNamesField(context, fields);
}

}
@@ -23,7 +23,10 @@
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@@ -136,6 +139,15 @@ public String typeName() {
return CONTENT_TYPE;
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Boolean nullValue() {
return (Boolean)super.nullValue();
@@ -253,6 +265,8 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
}
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0));
} else {
createFieldNamesField(context, fields);
}
}

@@ -21,6 +21,8 @@
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.suggest.document.Completion50PostingsFormat;
import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
import org.apache.lucene.search.suggest.document.CompletionQuery;
@@ -40,11 +42,13 @@
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -257,6 +261,11 @@ public static synchronized PostingsFormat postingsFormat() {
return postingsFormat;
}

@Override
public Query existsQuery(QueryShardContext context) {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}

/**
* Completion prefix query
*/
@@ -456,6 +465,11 @@ public Mapper parse(ParseContext context) throws IOException {
context.doc().add(new SuggestField(fieldType().name(), input, metaData.weight));
}
}
List<IndexableField> fields = new ArrayList<>(1);
createFieldNamesField(context, fields);
for (IndexableField field : fields) {
context.doc().add(field);
}
multiFields.parse(this, context);
return null;
}
@@ -26,9 +26,12 @@
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
@@ -245,6 +248,15 @@ long parse(String value) {
return dateTimeFormatter().parser().parseMillis(value);
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Query termQuery(Object value, @Nullable QueryShardContext context) {
Query query = rangeQuery(value, value, true, true, ShapeRelation.INTERSECTS, null, null, context);
@@ -451,6 +463,8 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
}
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType().name(), timestamp));
} else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) {
createFieldNamesField(context, fields);
}
if (fieldType().stored()) {
fields.add(new StoredField(fieldType().name(), timestamp));
12 changes: 12 additions & 0 deletions core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
@@ -22,6 +22,7 @@
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@@ -33,6 +34,7 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper.FieldNamesFieldType;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;

@@ -285,6 +287,16 @@ public Mapper parse(ParseContext context) throws IOException {
*/
protected abstract void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException;

protected void createFieldNamesField(ParseContext context, List<IndexableField> fields) {
FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.docMapper()
.metadataMapper(FieldNamesFieldMapper.class).fieldType();
if (fieldNamesFieldType != null && fieldNamesFieldType.isEnabled()) {
for (String fieldName : FieldNamesFieldMapper.extractFieldNames(fieldType().name())) {
fields.add(new Field(FieldNamesFieldMapper.NAME, fieldName, fieldNamesFieldType));
}
}
}

@Override
public Iterator<Mapper> iterator() {
return multiFields.iterator();
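
The new protected createFieldNamesField helper above centralizes writing `_field_names` entries, and the concrete mappers in this commit call it only when a field lacks doc values. A minimal plain-Lucene sketch of the resulting index-time contract follows; it is an editorial illustration assuming lucene-core and lucene-analyzers-common 7.x on the classpath, with made-up field names, and it writes a raw `_field_names` term directly rather than going through FieldNamesFieldType as the real mappers do.

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.RAMDirectory;

public class FieldNamesContractSketch {
    public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()))) {
            Document doc = new Document();
            // Field with doc values: exists can be answered directly, so no
            // _field_names entry is written for it any more.
            doc.add(new NumericDocValuesField("rating", 5));
            // Stored-only field without doc values: an entry in _field_names is
            // still needed so that an exists query can find it.
            doc.add(new StoredField("raw_payload", new byte[] {1, 2, 3}));
            doc.add(new StringField("_field_names", "raw_payload", Field.Store.NO));
            writer.addDocument(doc);
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            // Both exists-style lookups match the single indexed document.
            System.out.println(searcher.count(new DocValuesFieldExistsQuery("rating")));
            System.out.println(searcher.count(new TermQuery(new Term("_field_names", "raw_payload"))));
        }
    }
}
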
@@ -23,6 +23,10 @@
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -44,6 +48,9 @@
*/
public class FieldNamesFieldMapper extends MetadataFieldMapper {

private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(
ESLoggerFactory.getLogger(FieldNamesFieldMapper.class));

public static final String NAME = "_field_names";

public static final String CONTENT_TYPE = "_field_names";
@@ -178,11 +185,18 @@ public boolean isEnabled() {
return enabled;
}

@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException("Cannot run exists query on _field_names");
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
if (isEnabled() == false) {
throw new IllegalStateException("Cannot run [exists] queries if the [_field_names] field is disabled");
}
DEPRECATION_LOGGER.deprecated(
"terms query on the _field_names field is deprecated and will be removed, use exists query instead");
return super.termQuery(value, context);
}
}
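
For client code, the deprecation above means that querying `_field_names` directly should be replaced with an exists query. A brief editorial sketch using the query builders from the Elasticsearch Java API (the field name is an illustrative assumption):

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class ExistsVsFieldNamesTerm {
    public static void main(String[] args) {
        // Deprecated by this change: a term query against the _field_names field.
        QueryBuilder deprecated = QueryBuilders.termQuery("_field_names", "user_id");
        // Preferred: an exists query, which can now be answered from doc values.
        QueryBuilder preferred = QueryBuilders.existsQuery("user_id");
        System.out.println(deprecated);
        System.out.println(preferred);
    }
}
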
@@ -206,12 +220,14 @@ public void preParse(ParseContext context) throws IOException {

@Override
public void postParse(ParseContext context) throws IOException {
-        super.parse(context);
+        if (context.indexSettings().getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).before(Version.V_6_1_0)) {
+            super.parse(context);
+        }
}

@Override
public Mapper parse(ParseContext context) throws IOException {
-        // we parse in post parse
+        // Adding values to the _field_names field is handled by the mappers for each field type
return null;
}

@@ -23,7 +23,10 @@
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoPoint;
@@ -37,6 +40,7 @@
import org.elasticsearch.index.query.QueryShardException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -180,6 +184,15 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
return new AbstractLatLonPointDVIndexFieldData.Builder();
}

@Override
public Query existsQuery(QueryShardContext context) {
if (hasDocValues()) {
return new DocValuesFieldExistsQuery(name());
} else {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead: ["
@@ -207,6 +220,12 @@ protected void parse(ParseContext context, GeoPoint point) throws IOException {
}
if (fieldType.hasDocValues()) {
context.doc().add(new LatLonDocValuesField(fieldType().name(), point.lat(), point.lon()));
} else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) {
List<IndexableField> fields = new ArrayList<>(1);
createFieldNamesField(context, fields);
for (IndexableField field : fields) {
context.doc().add(field);
}
}
// if the mapping contains multifields then use the geohash string
if (multiFields.iterator().hasNext()) {
(Remaining changed files in this commit are not shown.)