
Verify feature usage in REST tests #106800

Merged: 10 commits, Mar 28, 2024
@@ -60,7 +60,6 @@
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.features.FeatureSpecification;
@@ -88,11 +87,9 @@
import org.junit.Before;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
@@ -365,23 +362,16 @@ protected final TestFeatureService createTestFeatureService(
Set<Version> semanticNodeVersions
) {
// Historical features information is unavailable when using legacy test plugins
boolean hasHistoricalFeaturesInformation = System.getProperty("tests.features.metadata.path") != null;

final List<FeatureSpecification> featureSpecifications = new ArrayList<>(createAdditionalFeatureSpecifications());
featureSpecifications.add(new RestTestLegacyFeatures());
if (hasHistoricalFeaturesInformation) {
featureSpecifications.add(new ESRestTestCaseHistoricalFeatures());
} else {
if (ESRestTestFeatureService.hasFeatureMetadata() == false) {
logger.warn(
"This test is running on the legacy test framework; historical features from production code will not be available. "
+ "You need to port the test to the new test plugins in order to use historical features from production code. "
+ "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as {}.",
RestTestLegacyFeatures.class.getCanonicalName()
);
}

return new ESRestTestFeatureService(
featureSpecifications,
createAdditionalFeatureSpecifications(),
semanticNodeVersions,
ClusterFeatures.calculateAllNodeFeatures(clusterStateFeatures.values())
);
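Note: the warning above points test authors at test-only FeatureSpecification classes such as RestTestLegacyFeatures. A minimal sketch of what such a test-only declaration could look like follows; the class name and feature id are hypothetical and used only for illustration:

    import org.elasticsearch.features.FeatureSpecification;
    import org.elasticsearch.features.NodeFeature;

    import java.util.Set;

    // Hypothetical test-only feature specification; in practice such features are added to
    // RestTestLegacyFeatures (or a similar test-only spec), as the warning message suggests.
    public class ExampleTestOnlyFeatures implements FeatureSpecification {
        public static final NodeFeature EXAMPLE_TEST_FEATURE = new NodeFeature("test.example_feature");

        @Override
        public Set<NodeFeature> getFeatures() {
            return Set.of(EXAMPLE_TEST_FEATURE);
        }
    }

Production features, by contrast, belong in the FeatureSpecification of the relevant module so that they show up in the extracted feature metadata checked below.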
@@ -2413,42 +2403,6 @@ private static boolean isMlEnabled() {
}
}

private static class ESRestTestCaseHistoricalFeatures implements FeatureSpecification {
private static Map<NodeFeature, Version> historicalFeatures;

@Override
@SuppressForbidden(reason = "File#pathSeparator has not equivalent in java.nio.file")
public Map<NodeFeature, Version> getHistoricalFeatures() {
if (historicalFeatures == null) {
Map<NodeFeature, Version> historicalFeaturesMap = new HashMap<>();
String metadataPath = System.getProperty("tests.features.metadata.path");
if (metadataPath == null) {
throw new UnsupportedOperationException(
"Historical features information is unavailable when using legacy test plugins."
);
}

String[] metadataFiles = metadataPath.split(File.pathSeparator);
for (String metadataFile : metadataFiles) {
try (
InputStream in = Files.newInputStream(PathUtils.get(metadataFile));
XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, in)
) {
for (Map.Entry<String, String> entry : parser.mapStrings().entrySet()) {
historicalFeaturesMap.put(new NodeFeature(entry.getKey()), Version.fromString(entry.getValue()));
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}

historicalFeatures = Collections.unmodifiableMap(historicalFeaturesMap);
}

return historicalFeatures;
}
}

public static void setIgnoredErrorResponseCodes(Request request, RestStatus... restStatuses) {
request.addParameter(
IGNORE_RESPONSE_CODES_PARAM,
@@ -10,57 +10,136 @@

import org.elasticsearch.Version;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.Strings;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.features.FeatureData;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.NavigableMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.function.BiConsumer;

import static java.util.Collections.emptySet;

class ESRestTestFeatureService implements TestFeatureService {
private final Predicate<String> historicalFeaturesPredicate;
private final Set<String> clusterStateFeatures;
private final Set<String> allSupportedFeatures;
private final Set<String> knownHistoricalFeatureNames;

ESRestTestFeatureService(
List<? extends FeatureSpecification> specs,
Collection<Version> nodeVersions,
Set<String> clusterStateFeatures
) {
var minNodeVersion = nodeVersions.stream().min(Comparator.naturalOrder());
var featureData = FeatureData.createFromSpecifications(specs);
var historicalFeatures = featureData.getHistoricalFeatures();
Set<String> allHistoricalFeatures = historicalFeatures.lastEntry() == null ? Set.of() : historicalFeatures.lastEntry().getValue();

this.allSupportedFeatures = Sets.union(clusterStateFeatures, minNodeVersion.<Set<String>>map(v -> {
var historicalFeaturesForVersion = historicalFeatures.floorEntry(v);
return historicalFeaturesForVersion == null ? Set.of() : historicalFeaturesForVersion.getValue();
}).orElse(allHistoricalFeatures));

this.historicalFeaturesPredicate = minNodeVersion.<Predicate<String>>map(
v -> featureId -> hasHistoricalFeature(historicalFeatures, v, featureId)
).orElse(featureId -> true); // We can safely assume that new non-semantic versions (serverless) support all historical features
ESRestTestFeatureService(List<FeatureSpecification> featureSpecs, Collection<Version> nodeVersions, Set<String> clusterStateFeatures) {
List<FeatureSpecification> specs = new ArrayList<>(featureSpecs);
specs.add(new RestTestLegacyFeatures());
if (MetadataHolder.HISTORICAL_FEATURES != null) {
specs.add(MetadataHolder.HISTORICAL_FEATURES);
}
var historicalFeatures = FeatureData.createFromSpecifications(specs).getHistoricalFeatures();
this.knownHistoricalFeatureNames = Optional.ofNullable(historicalFeatures.lastEntry()).map(Map.Entry::getValue).orElse(emptySet());
this.clusterStateFeatures = clusterStateFeatures;
this.allSupportedFeatures = Sets.union(
clusterStateFeatures,
nodeVersions.stream()
.min(Comparator.naturalOrder())
.map(v -> Optional.ofNullable(historicalFeatures.floorEntry(v)).map(Map.Entry::getValue).orElse(emptySet()))
.orElse(knownHistoricalFeatureNames)
);
}

private static boolean hasHistoricalFeature(NavigableMap<Version, Set<String>> historicalFeatures, Version version, String featureId) {
var features = historicalFeatures.floorEntry(version);
return features != null && features.getValue().contains(featureId);
public static boolean hasFeatureMetadata() {
return MetadataHolder.HISTORICAL_FEATURES != null;
}

@Override
public boolean clusterHasFeature(String featureId) {
if (clusterStateFeatures.contains(featureId)) {
return true;
if (hasFeatureMetadata()
&& MetadataHolder.FEATURE_NAMES.contains(featureId) == false
&& knownHistoricalFeatureNames.contains(featureId) == false) {
throw new IllegalArgumentException(
Strings.format(
"Unknown feature %s: check the feature has been added to the correct FeatureSpecification in the relevant module or, "
+ "if this is a legacy feature used only in tests, to a test-only FeatureSpecification such as %s.",
featureId,
RestTestLegacyFeatures.class.getCanonicalName()
)
);
}
return historicalFeaturesPredicate.test(featureId);
if (MetadataHolder.FEATURE_NAMES.contains(featureId)) {
return clusterStateFeatures.contains(featureId);
}
return allSupportedFeatures.contains(featureId);
}

@Override
public Set<String> getAllSupportedFeatures() {
return allSupportedFeatures;
}

private static class MetadataHolder {
private record HistoricalFeatureSpec(Map<NodeFeature, Version> historicalFeatures) implements FeatureSpecification {
@Override
public Map<NodeFeature, Version> getHistoricalFeatures() {
return historicalFeatures;
}
}

private static final FeatureSpecification HISTORICAL_FEATURES;
private static final Set<String> FEATURE_NAMES;

static {
String metadataPath = System.getProperty("tests.features.metadata.path");
if (metadataPath == null) {
FEATURE_NAMES = emptySet();
HISTORICAL_FEATURES = null;
} else {
Set<String> featureNames = new HashSet<>();
Map<NodeFeature, Version> historicalFeatures = new HashMap<>();
loadFeatureMetadata(metadataPath, (key, value) -> {
if (key.equals("historical_features") && value instanceof Map<?, ?> map) {
for (var entry : map.entrySet()) {
historicalFeatures.put(new NodeFeature((String) entry.getKey()), Version.fromString((String) entry.getValue()));
}
}
if (key.equals("feature_names") && value instanceof Collection<?> collection) {
for (var entry : collection) {
featureNames.add((String) entry);
}
}
});
FEATURE_NAMES = Collections.unmodifiableSet(featureNames);
HISTORICAL_FEATURES = new HistoricalFeatureSpec(Collections.unmodifiableMap(historicalFeatures));
}
}

@SuppressForbidden(reason = "File#pathSeparator has not equivalent in java.nio.file")
private static void loadFeatureMetadata(String metadataPath, BiConsumer<String, Object> consumer) {
String[] metadataFiles = metadataPath.split(File.pathSeparator);
for (String metadataFile : metadataFiles) {
try (
InputStream in = Files.newInputStream(PathUtils.get(metadataFile));
XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, in)
) {
parser.map().forEach(consumer);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
}
}
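Note: MetadataHolder above reads the file(s) listed in the tests.features.metadata.path system property, expecting a top-level "historical_features" object (feature id mapped to the version that introduced it) and a "feature_names" array. A minimal, self-contained sketch of parsing a document with that shape, assuming the same XContent classes used above; the feature ids and version in the sample are made up:

    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentParserConfiguration;
    import org.elasticsearch.xcontent.json.JsonXContent;

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    public class FeatureMetadataFormatExample {
        public static void main(String[] args) throws Exception {
            // Sample document with the same top-level keys that loadFeatureMetadata consumes;
            // the feature ids and version below are illustrative only.
            String sample = """
                {
                  "historical_features": { "example.historical_feature": "8.12.0" },
                  "feature_names": [ "example.current_feature" ]
                }""";
            try (
                InputStream in = new ByteArrayInputStream(sample.getBytes(StandardCharsets.UTF_8));
                XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, in)
            ) {
                // parser.map() yields a Map<String, Object>, mirroring how MetadataHolder walks the entries.
                parser.map().forEach((key, value) -> System.out.println(key + " -> " + value));
            }
        }
    }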
@@ -9,6 +9,7 @@
package org.elasticsearch.extractor.features;

import org.elasticsearch.Version;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
@@ -24,8 +25,10 @@
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;

public class HistoricalFeaturesMetadataExtractor {
private final ClassLoader classLoader;
@@ -62,23 +65,36 @@ public void generateMetadataFile(Path outputFile) {
XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os)
) {
generator.writeStartObject();
for (Map.Entry<NodeFeature, Version> entry : extractHistoricalFeatureMetadata().entrySet()) {
generator.writeStringField(entry.getKey().id(), entry.getValue().toString());
}
extractHistoricalFeatureMetadata((historical, names) -> {
generator.writeFieldName("historical_features");
generator.writeStartObject();
for (Map.Entry<NodeFeature, Version> entry : historical.entrySet()) {
generator.writeStringField(entry.getKey().id(), entry.getValue().toString());
}
generator.writeEndObject();
generator.writeFieldName("feature_names");
generator.writeStartArray();
for (var entry : names) {
generator.writeString(entry);
}
generator.writeEndArray();
});
generator.writeEndObject();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}

public Map<NodeFeature, Version> extractHistoricalFeatureMetadata() {
void extractHistoricalFeatureMetadata(CheckedBiConsumer<Map<NodeFeature, Version>, Set<String>, IOException> metadataConsumer)
throws IOException {
Map<NodeFeature, Version> historicalFeatures = new HashMap<>();
Set<String> featureNames = new HashSet<>();
ServiceLoader<FeatureSpecification> featureSpecLoader = ServiceLoader.load(FeatureSpecification.class, classLoader);
for (FeatureSpecification featureSpecification : featureSpecLoader) {
historicalFeatures.putAll(featureSpecification.getHistoricalFeatures());
featureSpecification.getFeatures().stream().map(NodeFeature::id).forEach(featureNames::add);
}

return historicalFeatures;
metadataConsumer.accept(historicalFeatures, featureNames);
}

private static void printUsageAndExit() {
@@ -19,11 +19,18 @@
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;

public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase {
@@ -33,16 +40,28 @@ public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase {

public void testExtractHistoricalMetadata() throws IOException {
HistoricalFeaturesMetadataExtractor extractor = new HistoricalFeaturesMetadataExtractor(this.getClass().getClassLoader());
Map<NodeFeature, Version> nodeFeatureVersionMap = extractor.extractHistoricalFeatureMetadata();
Map<NodeFeature, Version> nodeFeatureVersionMap = new HashMap<>();
Set<String> featureNamesSet = new HashSet<>();
extractor.extractHistoricalFeatureMetadata((historical, names) -> {
nodeFeatureVersionMap.putAll(historical);
featureNamesSet.addAll(names);
});
assertThat(nodeFeatureVersionMap, not(anEmptyMap()));
assertThat(featureNamesSet, not(empty()));

Path outputFile = temporaryFolder.newFile().toPath();
extractor.generateMetadataFile(outputFile);
try (XContentParser parser = JsonXContent.jsonXContent.createParser(EMPTY, Files.newInputStream(outputFile))) {
Map<String, String> parsedMap = parser.mapStrings();
for (Map.Entry<NodeFeature, Version> entry : nodeFeatureVersionMap.entrySet()) {
assertThat(parsedMap, hasEntry(entry.getKey().id(), entry.getValue().toString()));
}
Map<String, Object> parsedMap = parser.map();
assertThat(parsedMap, hasKey("historical_features"));
assertThat(parsedMap, hasKey("feature_names"));
@SuppressWarnings("unchecked")
Map<String, Object> historicalFeaturesMap = (Map<String, Object>) (parsedMap.get("historical_features"));
nodeFeatureVersionMap.forEach((key, value) -> assertThat(historicalFeaturesMap, hasEntry(key.id(), value.toString())));

@SuppressWarnings("unchecked")
Collection<String> featureNamesList = (Collection<String>) (parsedMap.get("feature_names"));
assertThat(featureNamesList, containsInAnyOrder(featureNamesSet.toArray()));
}
}
}
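Note: to tie this back to the PR title, a REST test would typically gate on a cluster feature roughly as sketched below. The feature id is hypothetical, and clusterHasFeature is assumed to be the ESRestTestCase helper backed by the TestFeatureService shown above. With this change, referencing a feature id that is in neither the extracted metadata nor a known test-only specification fails fast with the IllegalArgumentException shown earlier, rather than silently resolving as an unsupported feature.

    import org.elasticsearch.test.rest.ESRestTestCase;

    // Hypothetical REST test illustrating the kind of feature check this PR verifies; the feature id
    // "example.current_feature" is made up, and clusterHasFeature is assumed to be the ESRestTestCase
    // accessor delegating to the TestFeatureService.
    public class ExampleFeatureGatedIT extends ESRestTestCase {

        public void testSomethingThatNeedsAFeature() throws Exception {
            assumeTrue("cluster lacks example.current_feature", clusterHasFeature("example.current_feature"));
            // ... exercise the REST API that depends on the feature ...
        }
    }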