Commit f3a6af0

Fix parsing error, styling
carlosdelest committed Mar 14, 2024
1 parent ba6f00f commit f3a6af0
Showing 4 changed files with 94 additions and 95 deletions.

ClusterStateDiffTests.java

@@ -18,7 +18,6 @@
 import org.elasticsearch.cluster.metadata.IndexGraveyard;
 import org.elasticsearch.cluster.metadata.IndexGraveyardTests;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
-import org.elasticsearch.cluster.metadata.IndexMetadataTests;
 import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.metadata.RepositoriesMetadata;
@@ -62,6 +61,7 @@
 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptySet;
 import static org.elasticsearch.cluster.metadata.AliasMetadata.newAliasMetadataBuilder;
+import static org.elasticsearch.cluster.metadata.IndexMetadataTests.randomFieldInferenceMetadata;
 import static org.elasticsearch.cluster.routing.RandomShardRoutingMutator.randomChange;
 import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder;
 import static org.elasticsearch.cluster.routing.UnassignedInfoTests.randomUnassignedInfo;
@@ -587,17 +587,13 @@ public IndexMetadata randomChange(IndexMetadata part) {
                         builder.settings(Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0));
                         break;
                     case 3:
-                        builder.fieldInferenceMetadata(IndexMetadataTests.randomFieldInferenceMetadata(true));
+                        builder.fieldInferenceMetadata(randomFieldInferenceMetadata(true));
                         break;
                     default:
                         throw new IllegalArgumentException("Shouldn't be here");
                 }
                 return builder.build();
             }
-
-            /**
-             * Generates a random fieldsForModels map
-             */
         });
     }

FieldInferenceMetadata.java

@@ -26,6 +26,7 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
@@ -38,7 +39,9 @@
 public class FieldInferenceMetadata implements Diffable<FieldInferenceMetadata>, ToXContentFragment {
 
     private final ImmutableOpenMap<String, FieldInference> fieldInferenceMap;
-    private Map<String, Set<String>> fieldsForInferenceIds;
+
+    // Contains a lazily cached, reversed map of inferenceId -> fields
+    private volatile Map<String, Set<String>> fieldsForInferenceIds;
 
     public static final FieldInferenceMetadata EMPTY = new FieldInferenceMetadata(ImmutableOpenMap.of());
 
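The new volatile field backs a lazily computed, immutable reverse index from inference id to field names, built on first access in getFieldsForInferenceIds (see the next hunk). A minimal standalone sketch of that idiom, with illustrative names that are not from the Elasticsearch source:

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class ReverseIndex {
    // Forward mapping, immutable after construction (illustrative stand-in
    // for fieldInferenceMap).
    private final Map<String, String> fieldToInferenceId;
    // Lazily built reverse mapping, published through a volatile write.
    private volatile Map<String, Set<String>> inferenceIdToFields;

    ReverseIndex(Map<String, String> fieldToInferenceId) {
        this.fieldToInferenceId = Map.copyOf(fieldToInferenceId);
    }

    Map<String, Set<String>> reverse() {
        Map<String, Set<String>> cached = inferenceIdToFields;
        if (cached != null) {
            return cached;
        }
        // Group field names by their inference id.
        Map<String, Set<String>> built = new HashMap<>();
        fieldToInferenceId.forEach((field, id) -> built.computeIfAbsent(id, k -> new HashSet<>()).add(field));
        cached = Collections.unmodifiableMap(built);
        inferenceIdToFields = cached;
        return cached;
    }
}

The race is benign: two threads may both build the map, but each publishes an equivalent unmodifiable instance through the volatile write, so no locking is needed.
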
@@ -62,48 +65,68 @@ public boolean isEmpty() {
         return fieldInferenceMap.isEmpty();
     }
 
-    public record FieldInference(String inferenceId, Set<String> sourceFields)
-        implements
-            SimpleDiffable<FieldInference>,
-            ToXContentFragment {
-
-        public static final ParseField INFERENCE_ID_FIELD = new ParseField("inference_id");
-        public static final ParseField SOURCE_FIELDS_FIELD = new ParseField("source_fields");
-
-        FieldInference(StreamInput in) throws IOException {
-            this(in.readString(), in.readCollectionAsImmutableSet(StreamInput::readString));
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            out.writeString(inferenceId);
-            out.writeStringCollection(sourceFields);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(INFERENCE_ID_FIELD.getPreferredName(), inferenceId);
-            builder.field(SOURCE_FIELDS_FIELD.getPreferredName(), sourceFields);
-            builder.endObject();
-            return builder;
-        }
-
-        public static FieldInference fromXContent(XContentParser parser) throws IOException {
-            return PARSER.parse(parser, null);
-        }
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<FieldInference, Void> PARSER = new ConstructingObjectParser<>(
-            "field_inference_parser",
-            false,
-            (args, unused) -> new FieldInference((String) args[0], (Set<String>) args[1])
-        );
-
-        static {
-            PARSER.declareString(ConstructingObjectParser.constructorArg(), INFERENCE_ID_FIELD);
-            PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), SOURCE_FIELDS_FIELD);
-        }
-    }
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeMap(fieldInferenceMap, (o, v) -> v.writeTo(o));
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.map(fieldInferenceMap);
+        return builder;
+    }
+
+    public static FieldInferenceMetadata fromXContent(XContentParser parser) throws IOException {
+        return new FieldInferenceMetadata(parser.map(HashMap::new, FieldInference::fromXContent));
+    }
+
+    public String getInferenceIdForField(String field) {
+        return getInferenceSafe(field, FieldInference::inferenceId);
+    }
+
+    private <T> T getInferenceSafe(String field, Function<FieldInference, T> fieldInferenceFunction) {
+        FieldInference fieldInference = fieldInferenceMap.get(field);
+        if (fieldInference == null) {
+            return null;
+        }
+        return fieldInferenceFunction.apply(fieldInference);
+    }
+
+    public Set<String> getSourceFields(String field) {
+        return getInferenceSafe(field, FieldInference::sourceFields);
+    }
+
+    public Map<String, Set<String>> getFieldsForInferenceIds() {
+        if (fieldsForInferenceIds != null) {
+            return fieldsForInferenceIds;
+        }
+
+        // Cache the result as a field
+        Map<String, Set<String>> fieldsForInferenceIdsMap = new HashMap<>();
+        for (Map.Entry<String, FieldInference> entry : fieldInferenceMap.entrySet()) {
+            String fieldName = entry.getKey();
+            String inferenceId = entry.getValue().inferenceId();
+
+            // Get or create the set associated with the inferenceId
+            Set<String> fields = fieldsForInferenceIdsMap.computeIfAbsent(inferenceId, k -> new HashSet<>());
+            fields.add(fieldName);
+        }
+
+        fieldsForInferenceIds = Collections.unmodifiableMap(fieldsForInferenceIdsMap);
+        return fieldsForInferenceIds;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        FieldInferenceMetadata that = (FieldInferenceMetadata) o;
+        return Objects.equals(fieldInferenceMap, that.fieldInferenceMap);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(fieldInferenceMap);
+    }
 
     @Override
@@ -158,67 +181,47 @@ public FieldInferenceMetadata apply(FieldInferenceMetadata part) {
         }
     }
 
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeMap(fieldInferenceMap, (o, v) -> v.writeTo(o));
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.map(fieldInferenceMap);
-        return builder;
-    }
-
-    public static FieldInferenceMetadata fromXContent(XContentParser parser) throws IOException {
-        return new FieldInferenceMetadata(parser.map(HashMap::new, FieldInference::fromXContent));
-    }
-
-    public String getInferenceIdForField(String field) {
-        return getInferenceSafe(field, FieldInference::inferenceId);
-    }
-
-    private <T> T getInferenceSafe(String field, Function<FieldInference, T> fieldInferenceFunction) {
-        FieldInference fieldInference = fieldInferenceMap.get(field);
-        if (fieldInference == null) {
-            return null;
-        }
-        return fieldInferenceFunction.apply(fieldInference);
-    }
-
-    public Set<String> getSourceFields(String field) {
-        return getInferenceSafe(field, FieldInference::sourceFields);
-    }
-
-    public Map<String, Set<String>> getFieldsForInferenceIds() {
-        if (fieldsForInferenceIds != null) {
-            return fieldsForInferenceIds;
-        }
-
-        // Cache the result as a field
-        Map<String, Set<String>> fieldsForInferenceIdsMap = new HashMap<>();
-        for (Map.Entry<String, FieldInference> entry : fieldInferenceMap.entrySet()) {
-            String fieldName = entry.getKey();
-            String inferenceId = entry.getValue().inferenceId();
-
-            // Get or create the set associated with the inferenceId
-            Set<String> fields = fieldsForInferenceIdsMap.computeIfAbsent(inferenceId, k -> new HashSet<>());
-            fields.add(fieldName);
-        }
-
-        fieldsForInferenceIds = Collections.unmodifiableMap(fieldsForInferenceIdsMap);
-        return fieldsForInferenceIds;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        FieldInferenceMetadata that = (FieldInferenceMetadata) o;
-        return Objects.equals(fieldInferenceMap, that.fieldInferenceMap);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(fieldInferenceMap);
-    }
+    public record FieldInference(String inferenceId, Set<String> sourceFields)
+        implements
+            SimpleDiffable<FieldInference>,
+            ToXContentFragment {
+
+        public static final ParseField INFERENCE_ID_FIELD = new ParseField("inference_id");
+        public static final ParseField SOURCE_FIELDS_FIELD = new ParseField("source_fields");
+
+        FieldInference(StreamInput in) throws IOException {
+            this(in.readString(), in.readCollectionAsImmutableSet(StreamInput::readString));
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(inferenceId);
+            out.writeStringCollection(sourceFields);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            builder.field(INFERENCE_ID_FIELD.getPreferredName(), inferenceId);
+            builder.field(SOURCE_FIELDS_FIELD.getPreferredName(), sourceFields);
+            builder.endObject();
+            return builder;
+        }
+
+        public static FieldInference fromXContent(XContentParser parser) throws IOException {
+            return PARSER.parse(parser, null);
+        }
+
+        @SuppressWarnings("unchecked")
+        private static final ConstructingObjectParser<FieldInference, Void> PARSER = new ConstructingObjectParser<>(
+            "field_inference_parser",
+            false,
+            (args, unused) -> new FieldInference((String) args[0], new HashSet<>((List<String>) args[1]))
+        );
+
+        static {
+            PARSER.declareString(ConstructingObjectParser.constructorArg(), INFERENCE_ID_FIELD);
+            PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), SOURCE_FIELDS_FIELD);
+        }
+    }
 }
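
The "parsing error" in the commit title appears to be the constructor-argument cast in PARSER: declareStringArray hands the parsed value to the record constructor as a List<String>, so the old (Set<String>) args[1] cast compiled with only an unchecked warning but threw ClassCastException when the lambda ran during parsing. The fix copies the parsed list into a HashSet. A minimal plain-Java repro of the difference, independent of the Elasticsearch parser infrastructure:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ParserCastDemo {
    public static void main(String[] args) {
        // A string-array field is parsed into a List<String>, handed over as Object.
        Object parsed = List.of("title", "body");

        // Pre-fix equivalent: compiles with an unchecked warning,
        // throws ClassCastException at runtime:
        // Set<String> sourceFields = (Set<String>) parsed;

        // Post-fix equivalent: copy the parsed list into the Set the record expects.
        Set<String> sourceFields = new HashSet<>((List<String>) parsed);
        System.out.println(sourceFields); // [title, body]
    }
}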

IndexMetadata.java

@@ -540,7 +540,7 @@ public Iterator<Setting<?>> settings() {
 
     public static final String KEY_SHARD_SIZE_FORECAST = "shard_size_forecast";
 
-    public static final String KEY_FIELD_INFERENCE_METADATA = "field_inference_metadata";
+    public static final String KEY_FIELD_INFERENCE = "field_inference";
 
     public static final String INDEX_STATE_FILE_PREFIX = "state-";
 
@@ -2423,7 +2423,7 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build
         }
 
         if (indexMetadata.fieldInferenceMetadata.isEmpty() == false) {
-            builder.field(KEY_FIELD_INFERENCE_METADATA, indexMetadata.fieldInferenceMetadata);
+            builder.field(KEY_FIELD_INFERENCE, indexMetadata.fieldInferenceMetadata);
         }
 
         builder.endObject();
@@ -2503,7 +2503,7 @@ public static IndexMetadata fromXContent(XContentParser parser, Map<String, Mapp
                     case KEY_STATS:
                         builder.stats(IndexMetadataStats.fromXContent(parser));
                         break;
-                    case KEY_FIELD_INFERENCE_METADATA:
+                    case KEY_FIELD_INFERENCE:
                         builder.fieldInferenceMetadata(FieldInferenceMetadata.fromXContent(parser));
                         break;
                     default:
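
After the rename, toXContent writes the fragment under field_inference and fromXContent reads it back under the same key. A hypothetical sketch of the serialized shape, assembled from the FieldInference fields above; the semantic field name and inference endpoint id are invented for illustration:

// Hypothetical fragment of the index-metadata X-content after the rename;
// the field name and endpoint id below are invented for illustration.
String fieldInferenceFragment = """
    "field_inference": {
      "my_semantic_field": {
        "inference_id": "my-inference-endpoint",
        "source_fields": ["title", "body"]
      }
    }
    """;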

IndexMetadataTests.java

@@ -578,7 +578,7 @@ public static FieldInferenceMetadata randomFieldInferenceMetadata(boolean allowN
     }
 
     private static FieldInferenceMetadata.FieldInference randomFieldInference() {
-        return new FieldInferenceMetadata.FieldInference(randomAlphaOfLength(5), randomSet(0, 5, () -> randomIdentifier()));
+        return new FieldInferenceMetadata.FieldInference(randomIdentifier(), randomSet(0, 5, ESTestCase::randomIdentifier));
     }
 
     private IndexMetadataStats randomIndexStats(int numberOfShards) {
