Remove unnecessary code in BulkShardRequest
carlosdelest committed May 14, 2024
1 parent f50d41f commit 45b884e
Showing 1 changed file with 0 additions and 32 deletions.
BulkShardRequest.java

@@ -15,15 +15,13 @@
 import org.elasticsearch.action.support.replication.ReplicatedWriteRequest;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.update.UpdateRequest;
-import org.elasticsearch.cluster.metadata.InferenceFieldMetadata;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.transport.RawIndexingDataTransportRequest;
 
 import java.io.IOException;
-import java.util.Map;
 import java.util.Set;
 
 public final class BulkShardRequest extends ReplicatedWriteRequest<BulkShardRequest>
@@ -35,8 +33,6 @@ public final class BulkShardRequest extends ReplicatedWriteRequest<BulkShardRequ
 
     private final BulkItemRequest[] items;
 
-    private transient Map<String, InferenceFieldMetadata> inferenceFieldMap = null;
-
     public BulkShardRequest(StreamInput in) throws IOException {
         super(in);
         items = in.readArray(i -> i.readOptionalWriteable(inpt -> new BulkItemRequest(shardId, inpt)), BulkItemRequest[]::new);
@@ -48,30 +44,6 @@ public BulkShardRequest(ShardId shardId, RefreshPolicy refreshPolicy, BulkItemRe
         setRefreshPolicy(refreshPolicy);
     }
 
-    /**
-     * Public for test
-     * Set the transient metadata indicating that this request requires running inference before proceeding.
-     */
-    public void setInferenceFieldMap(Map<String, InferenceFieldMetadata> fieldInferenceMap) {
-        this.inferenceFieldMap = fieldInferenceMap;
-    }
-
-    /**
-     * Consumes the inference metadata to execute inference on the bulk items just once.
-     */
-    public Map<String, InferenceFieldMetadata> consumeInferenceFieldMap() {
-        Map<String, InferenceFieldMetadata> ret = inferenceFieldMap;
-        inferenceFieldMap = null;
-        return ret;
-    }
-
-    /**
-     * Public for test
-     */
-    public Map<String, InferenceFieldMetadata> getInferenceFieldMap() {
-        return inferenceFieldMap;
-    }
-
     public long totalSizeInBytes() {
         long totalSizeInBytes = 0;
         for (int i = 0; i < items.length; i++) {
@@ -113,10 +85,6 @@ public String[] indices() {
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        if (inferenceFieldMap != null) {
-            // Inferencing metadata should have been consumed as part of the ShardBulkInferenceActionFilter processing
-            throw new IllegalStateException("Inference metadata should have been consumed before writing to the stream");
-        }
         super.writeTo(out);
         out.writeArray((o, item) -> {
             if (item != null) {
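
For context, the deleted members implemented a consume-once pattern for transient request state: a filter attaches metadata to the request, a downstream consumer takes it exactly once, and writeTo asserts nothing was left unconsumed before the request crosses the wire. The standalone sketch below illustrates that pattern only; the class and member names are hypothetical, and a String-valued map stands in for the real Map<String, InferenceFieldMetadata>.

import java.util.Map;

// Standalone illustration of the consume-once transient-metadata pattern
// removed by this commit. All names here are hypothetical, not Elasticsearch API.
final class TransientStateRequest {

    // transient: node-local state that must never be serialized.
    private transient Map<String, String> metadata;

    void setMetadata(Map<String, String> metadata) {
        this.metadata = metadata;
    }

    // Hands the metadata to the caller exactly once and clears the field,
    // so neither a second consumer nor the serialization path sees it.
    Map<String, String> consumeMetadata() {
        Map<String, String> ret = metadata;
        metadata = null;
        return ret;
    }

    // Serialization guard: unconsumed metadata at this point is a bug.
    void writeTo(StringBuilder out) {
        if (metadata != null) {
            throw new IllegalStateException("metadata should have been consumed before writing to the stream");
        }
        out.append("<serialized request body>");
    }

    public static void main(String[] args) {
        TransientStateRequest request = new TransientStateRequest();
        request.setMetadata(Map.of("semantic_field", "inference-endpoint-id"));

        Map<String, String> taken = request.consumeMetadata(); // consumed here...
        request.writeTo(new StringBuilder());                  // ...so the guard does not throw

        System.out.println("consumed: " + taken);
    }
}

The removed writeTo check above enforced the same invariant; once the field and its accessors were no longer needed, the guard became dead code and is deleted with them.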
