Skip to content

Commit

Permalink
Fix merge
Browse files Browse the repository at this point in the history
  • Loading branch information
carlosdelest committed Apr 9, 2024
1 parent 3c29dcb commit 0f57a5b
Show file tree
Hide file tree
Showing 6 changed files with 22 additions and 20 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -413,7 +413,7 @@
- match: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 0 }

---
"indices mappings count test for indices level":
"indices mappings exact count test for indices level":

- skip:
features: [arbitrary_key]
Expand Down Expand Up @@ -468,7 +468,7 @@
- do:
nodes.stats: { metric: _all, level: "indices", human: true }

# In the below assertions, we expect a field count of at least 26 because the above mapping expects the following:
# In the below assertions, we expect a field count of 26 because the above mapping expects the following:
# Field mappers (incl. alias fields and object mappers' flattened leaves):
# 1. _data_stream_timestamp
# 2. _doc_count
Expand Down Expand Up @@ -498,17 +498,13 @@
# 25. authors.name
# Runtime field mappers:
# 26. a_source_field
#
# Plugins (which may or may not be loaded depending on the context in which this test is executed) may add additional
# field mappers:
# 27. _semantic_text_inference (from ML plugin)

- gte: { nodes.$node_id.indices.mappings.total_count: 26 }
- is_true: nodes.$node_id.indices.mappings.total_estimated_overhead
- gte: { nodes.$node_id.indices.mappings.total_estimated_overhead_in_bytes: 26624 }
- gte: { nodes.$node_id.indices.indices.index1.mappings.total_count: 26 }
- match: { nodes.$node_id.indices.indices.index1.mappings.total_count: 26 }
- is_true: nodes.$node_id.indices.indices.index1.mappings.total_estimated_overhead
- gte: { nodes.$node_id.indices.indices.index1.mappings.total_estimated_overhead_in_bytes: 26624 }
- match: { nodes.$node_id.indices.indices.index1.mappings.total_estimated_overhead_in_bytes: 26624 }

---
"indices mappings does not exist in shards level":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -540,7 +540,7 @@ public Iterator<Setting<?>> settings() {

public static final String KEY_SHARD_SIZE_FORECAST = "shard_size_forecast";

public static final String KEY_FIELD_INFERENCE = "field_inference";
public static final String KEY_INFERENCE_FIELDS = "field_inference";

public static final String INDEX_STATE_FILE_PREFIX = "state-";

Expand Down Expand Up @@ -2437,7 +2437,7 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build
}

if (indexMetadata.getInferenceFields().isEmpty() == false) {
builder.startObject(KEY_FIELD_INFERENCE);
builder.startObject(KEY_INFERENCE_FIELDS);
for (InferenceFieldMetadata field : indexMetadata.getInferenceFields().values()) {
field.toXContent(builder, params);
}
Expand Down Expand Up @@ -2521,7 +2521,7 @@ public static IndexMetadata fromXContent(XContentParser parser, Map<String, Mapp
case KEY_STATS:
builder.stats(IndexMetadataStats.fromXContent(parser));
break;
case KEY_FIELD_INFERENCE:
case KEY_INFERENCE_FIELDS:
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
builder.putInferenceField(InferenceFieldMetadata.fromXContent(parser));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,13 @@
import java.util.List;
import java.util.Objects;

/**
 * Contains inference metadata for fields of an index.
 * Inference is performed on the coordinating node, to avoid re-running it at the shard / replica level, so the coordinating node
 * needs to determine whether inference is required for specific fields of an index.
 * Because the coordinating node does not necessarily have mapping information for all indices (only for those that have shards
 * on the node), the field inference information must be stored in the IndexMetadata and broadcast to all nodes.
 */
public final class InferenceFieldMetadata implements SimpleDiffable<InferenceFieldMetadata>, ToXContentFragment {
private static final String INFERENCE_ID_FIELD = "inference_id";
private static final String SOURCE_FIELDS_FIELD = "source_fields";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1132,10 +1132,6 @@ public String typeName() {
return CONTENT_TYPE;
}

public Integer getDims() {
return dims;
}

@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
if (format != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,12 +171,9 @@ public void parse(DocumentParserContext context) throws IOException {
}

String feature = null;
boolean origIsWithLeafObject = context.path().isWithinLeafObject();
try {
// make sure that we don't expand dots in field names while parsing
if (context.path().isWithinLeafObject() == false) {
context.path().setWithinLeafObject(true);
}
context.path().setWithinLeafObject(true);
for (Token token = context.parser().nextToken(); token != Token.END_OBJECT; token = context.parser().nextToken()) {
if (token == Token.FIELD_NAME) {
feature = context.parser().currentName();
Expand Down Expand Up @@ -210,7 +207,7 @@ public void parse(DocumentParserContext context) throws IOException {
context.addToFieldNames(fieldType().name());
}
} finally {
context.path().setWithinLeafObject(origIsWithLeafObject);
context.path().setWithinLeafObject(false);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,4 +63,10 @@ private static InferenceFieldMetadata createTestItem() {
String[] inputFields = generateRandomStringArray(5, 10, false, false);
return new InferenceFieldMetadata(name, inferenceId, inputFields);
}

/**
 * Every constructor argument of {@code InferenceFieldMetadata} is mandatory:
 * passing {@code null} for the name, the inference id, or the source-fields
 * array must fail fast with a {@link NullPointerException}.
 */
public void testNullCtorArgsThrowException() {
    // Valid placeholder values; each assertion below nulls out exactly one slot.
    final String validName = "name";
    final String validInferenceId = "inferenceId";
    final String[] validSourceFields = new String[0];

    assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, validInferenceId, validSourceFields));
    assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(validName, null, validSourceFields));
    assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(validName, validInferenceId, null));
}
}

0 comments on commit 0f57a5b

Please sign in to comment.