Skip to content

Commit

Permalink
Fix bbq index feature exposure for testing & remove feature flag (#114832)
Browse files Browse the repository at this point in the history

We actually don't need a cluster feature, a capability added if the
feature flag is enabled is enough for testing.

closes #114787
  • Loading branch information
benwtrent authored Oct 15, 2024
1 parent 2f1f24d commit e87b894
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 23 deletions.
2 changes: 0 additions & 2 deletions muted-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -408,8 +408,6 @@ tests:
- class: org.elasticsearch.xpack.enrich.EnrichIT
method: testDeleteExistingPipeline
issue: https://github.com/elastic/elasticsearch/issues/114775
- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/114787
- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests
method: testNoStream
issue: https://github.com/elastic/elasticsearch/issues/114788
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.index.IndexSettings;
Expand All @@ -29,7 +28,7 @@ public class MapperFeatures implements FeatureSpecification {

@Override
public Set<NodeFeature> getFeatures() {
Set<NodeFeature> features = Set.of(
return Set.of(
BWC_WORKAROUND_9_0,
IgnoredSourceFieldMapper.TRACK_IGNORED_SOURCE,
PassThroughObjectMapper.PASS_THROUGH_PRIORITY,
Expand All @@ -53,13 +52,9 @@ public Set<NodeFeature> getFeatures() {
IndexSettings.IGNORE_ABOVE_INDEX_LEVEL_SETTING,
SourceFieldMapper.SYNTHETIC_SOURCE_COPY_TO_INSIDE_OBJECTS_FIX,
TimeSeriesRoutingHashFieldMapper.TS_ROUTING_HASH_FIELD_PARSES_BYTES_REF,
FlattenedFieldMapper.IGNORE_ABOVE_WITH_ARRAYS_SUPPORT
FlattenedFieldMapper.IGNORE_ABOVE_WITH_ARRAYS_SUPPORT,
DenseVectorFieldMapper.BBQ_FORMAT
);
// BBQ is currently behind a feature flag for testing
if (DenseVectorFieldMapper.BBQ_FEATURE_FLAG.isEnabled()) {
return Sets.union(features, Set.of(DenseVectorFieldMapper.BBQ_FORMAT));
}
return features;
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.VectorUtil;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.util.FeatureFlag;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.index.IndexVersion;
Expand Down Expand Up @@ -110,7 +109,6 @@ public static boolean isNotUnitVector(float magnitude) {
public static final NodeFeature INT4_QUANTIZATION = new NodeFeature("mapper.vectors.int4_quantization");
public static final NodeFeature BIT_VECTORS = new NodeFeature("mapper.vectors.bit_vectors");
public static final NodeFeature BBQ_FORMAT = new NodeFeature("mapper.vectors.bbq");
public static final FeatureFlag BBQ_FEATURE_FLAG = new FeatureFlag("bbq_index_format");

public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0;
public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION;
Expand Down Expand Up @@ -2259,9 +2257,6 @@ private static IndexOptions parseIndexOptions(String fieldName, Object propNode)
throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]");
}
VectorIndexType parsedType = vectorIndexType.get();
if ((parsedType == VectorIndexType.BBQ_FLAT || parsedType == VectorIndexType.BBQ_HNSW) && BBQ_FEATURE_FLAG.isEnabled() == false) {
throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]");
}
return parsedType.parseIndexOptions(fieldName, indexOptionsMap);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,6 @@

import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH;
import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN;
import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.BBQ_FEATURE_FLAG;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
Expand Down Expand Up @@ -1228,11 +1227,9 @@ public void testInvalidParameters() {
e.getMessage(),
containsString("Failed to parse mapping: Mapping definition for [field] has unsupported parameters: [foo : {}]")
);
List<String> floatOnlyQuantizations = new ArrayList<>(Arrays.asList("int4_hnsw", "int8_hnsw", "int8_flat", "int4_flat"));
if (BBQ_FEATURE_FLAG.isEnabled()) {
floatOnlyQuantizations.add("bbq_hnsw");
floatOnlyQuantizations.add("bbq_flat");
}
List<String> floatOnlyQuantizations = new ArrayList<>(
Arrays.asList("int4_hnsw", "int8_hnsw", "int8_flat", "int4_flat", "bbq_hnsw", "bbq_flat")
);
for (String quantizationKind : floatOnlyQuantizations) {
e = expectThrows(
MapperParsingException.class,
Expand Down Expand Up @@ -1946,7 +1943,6 @@ public void testKnnQuantizedHNSWVectorsFormat() throws IOException {
}

public void testKnnBBQHNSWVectorsFormat() throws IOException {
assumeTrue("BBQ vectors are not supported in the current version", BBQ_FEATURE_FLAG.isEnabled());
final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10);
final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10);
final int dims = randomIntBetween(64, 4096);
Expand Down Expand Up @@ -1985,7 +1981,6 @@ public void testKnnBBQHNSWVectorsFormat() throws IOException {
}

public void testInvalidVectorDimensionsBBQ() {
assumeTrue("BBQ vectors are not supported in the current version", BBQ_FEATURE_FLAG.isEnabled());
for (String quantizedFlatFormat : new String[] { "bbq_hnsw", "bbq_flat" }) {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
b.field("type", "dense_vector");
Expand Down

0 comments on commit e87b894

Please sign in to comment.