Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support ML Inference Search Processor Writing to Search Extension #3061

Merged
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package org.opensearch.ml.processor;

import java.io.IOException;
import java.util.Map;

import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.SearchResponseSections;
import org.opensearch.action.search.ShardSearchFailure;
import org.opensearch.core.xcontent.XContentBuilder;

public class MLInferenceSearchResponse extends SearchResponse {
mingshl marked this conversation as resolved.
Show resolved Hide resolved
private static final String EXT_SECTION_NAME = "ext";

private Map<String, Object> params;

public MLInferenceSearchResponse(
Map<String, Object> params,
SearchResponseSections internalResponse,
String scrollId,
int totalShards,
int successfulShards,
int skippedShards,
long tookInMillis,
ShardSearchFailure[] shardFailures,
Clusters clusters
) {
super(internalResponse, scrollId, totalShards, successfulShards, skippedShards, tookInMillis, shardFailures, clusters);
this.params = params;
}

public void setParams(Map<String, Object> params) {
this.params = params;
}

public Map<String, Object> getParams() {
return this.params;
}
mingshl marked this conversation as resolved.
Show resolved Hide resolved

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerToXContent(builder, params);
mingshl marked this conversation as resolved.
Show resolved Hide resolved

if (this.params != null) {
builder.startObject(EXT_SECTION_NAME);
builder.field(MLInferenceSearchResponseProcessor.TYPE, this.params);

builder.endObject();
}
builder.endObject();
return builder;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@ public class MLInferenceSearchResponseProcessor extends AbstractProcessor implem
// it can be overwritten using max_prediction_tasks when creating processor
public static final int DEFAULT_MAX_PREDICTION_TASKS = 10;
public static final String DEFAULT_OUTPUT_FIELD_NAME = "inference_results";
public static final String EXTENSION_PREFIX = "ext.ml_inference";
mingshl marked this conversation as resolved.
Show resolved Hide resolved

protected MLInferenceSearchResponseProcessor(
String modelId,
Expand Down Expand Up @@ -158,7 +159,19 @@ public void processResponseAsync(

// if many to one, run rewriteResponseDocuments
if (!oneToOne) {
rewriteResponseDocuments(response, responseListener);
// use MLInferenceSearchResponseProcessor to allow writing to extension
MLInferenceSearchResponse mLInferenceSearchResponse = new MLInferenceSearchResponse(
null,
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In your SearchResponse, I noticed:

        if (this.params != null) {
            builder.startObject(EXT_SECTION_NAME);
            builder.field(MLInferenceSearchResponseProcessor.TYPE, this.params);

            builder.endObject();
        }
        ```
        
        But we are sending null from here?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If a search response doesn't have a search extension, we don't add null params to it.

for example, a search response with null params looks like this:

{
  "took": 1,
  "timed_out": false,
  "_shards": {
    "total": 1,
    "successful": 1,
    "skipped": 0,
    "failed": 0
  },
  "hits": {
    "total": {
      "value": 3,
      "relation": "eq"
    },
    "max_score": 1,
    "hits": [
      {
        "_index": "review_string_index",
        "_id": "1",
        "_score": 1,
        "_source": {
          "review": "Dr. Eric Goldberg is a fantastic doctor who has correctly diagnosed every issue that my wife and I have had. Unlike many of my past doctors, Dr. Goldberg is very accessible and we have been able to schedule appointments with him and his staff very quickly. We are happy to have him in the neighborhood and look forward to being his patients for many years to come.",
          "label": "5 stars"
        }
      }
}

for example, a search response with some params looks like this:

{
  "took": 1,
  "timed_out": false,
  "_shards": {
    "total": 1,
    "successful": 1,
    "skipped": 0,
    "failed": 0
  },
  "hits": {
    "total": {
      "value": 3,
      "relation": "eq"
    },
    "max_score": 1,
    "hits": [
      {
        "_index": "review_string_index",
        "_id": "1",
        "_score": 1,
        "_source": {
          "review": "Dr. Eric Goldberg is a fantastic doctor who has correctly diagnosed every issue that my wife and I have had. Unlike many of my past doctors, Dr. Goldberg is very accessible and we have been able to schedule appointments with him and his staff very quickly. We are happy to have him in the neighborhood and look forward to being his patients for many years to come.",
          "label": "5 stars"
        }
      }
  },
  "ext": {
    "ml_inference": {
      "llm_response": """ Based on the context provided:

- The first document is a positive review of Dr. Eric Goldberg from a patient. It praises Dr. Goldberg for correctly diagnosing issues for the patient and their wife. It also notes that Dr. Goldberg is very accessible and appointments can be scheduled quickly with him and his staff. The patient expresses happiness that Dr. Goldberg is in their neighborhood and looks forward to being his patient for many years.

- The second document just says "happy visit". 

- The third document says "sad place".

- In summary, the first document positively reviews a doctor, Dr. Eric Goldberg. The other two documents don't provide much context on their own, just mentioning a "happy visit" and "sad place"."""
    }
  }
}

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Following up on this discussion, I added a check to see whether the response being processed is of type MLInferenceSearchResponse, and instantiate one when it is not. f7682e0

response.getInternalResponse(),
response.getScrollId(),
response.getTotalShards(),
response.getSuccessfulShards(),
response.getSkippedShards(),
response.getSuccessfulShards(),
response.getShardFailures(),
response.getClusters()
);
rewriteResponseDocuments(mLInferenceSearchResponse, responseListener);
} else {
// if one to one, make one hit search response and run rewriteResponseDocuments
GroupedActionListener<SearchResponse> combineResponseListener = getCombineResponseGroupedActionListener(
Expand Down Expand Up @@ -545,22 +558,37 @@ public void onResponse(Map<Integer, MLOutput> multipleMLOutputs) {
} else {
modelOutputValuePerDoc = modelOutputValue;
}

if (sourceAsMap.containsKey(newDocumentFieldName)) {
if (override) {
sourceAsMapWithInference.remove(newDocumentFieldName);
sourceAsMapWithInference.put(newDocumentFieldName, modelOutputValuePerDoc);
// writing to search response extension
if (newDocumentFieldName.startsWith(EXTENSION_PREFIX)) {
Map<String, Object> params = ((MLInferenceSearchResponse) response).getParams();
String paramsName = newDocumentFieldName.replaceFirst(EXTENSION_PREFIX + ".", "");

if (params != null) {
params.put(paramsName, modelOutputValuePerDoc);
((MLInferenceSearchResponse) response).setParams(params);
} else {
logger
.debug(
"{} already exists in the search response hit. Skip processing this field.",
newDocumentFieldName
);
// TODO when the response has the same field name, should it throw exception? currently,
// ingest processor quietly skip it
Map<String, Object> newParams = new HashMap<>();
newParams.put(paramsName, modelOutputValuePerDoc);
((MLInferenceSearchResponse) response).setParams(newParams);
}
} else {
sourceAsMapWithInference.put(newDocumentFieldName, modelOutputValuePerDoc);
// writing to search response hits
if (sourceAsMap.containsKey(newDocumentFieldName)) {
if (override) {
sourceAsMapWithInference.remove(newDocumentFieldName);
sourceAsMapWithInference.put(newDocumentFieldName, modelOutputValuePerDoc);
} else {
logger
.debug(
"{} already exists in the search response hit. Skip processing this field.",
newDocumentFieldName
);
// TODO when the response has the same field name, should it throw exception? currently,
// ingest processor quietly skip it
mingshl marked this conversation as resolved.
Show resolved Hide resolved
}
} else {
sourceAsMapWithInference.put(newDocumentFieldName, modelOutputValuePerDoc);
}
}
}
}
Expand Down Expand Up @@ -774,6 +802,21 @@ public MLInferenceSearchResponseProcessor create(
+ ". Please adjust mappings."
);
}
boolean writeToSearchExtension = false;
mingshl marked this conversation as resolved.
Show resolved Hide resolved

if (outputMaps != null) {
for (Map<String, String> outputMap : outputMaps) {
for (String key : outputMap.keySet()) {
if (key.startsWith(EXTENSION_PREFIX)) {
writeToSearchExtension = true;
break;
}
}
}
}
mingshl marked this conversation as resolved.
Show resolved Hide resolved
if (writeToSearchExtension & oneToOne) {
throw new IllegalArgumentException("Writing model response to search extension does not support when one_to_one is true.");
mingshl marked this conversation as resolved.
Show resolved Hide resolved
}

return new MLInferenceSearchResponseProcessor(
modelId,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import static org.opensearch.ml.processor.MLInferenceSearchResponseProcessor.FULL_RESPONSE_PATH;
import static org.opensearch.ml.processor.MLInferenceSearchResponseProcessor.FUNCTION_NAME;
import static org.opensearch.ml.processor.MLInferenceSearchResponseProcessor.MODEL_INPUT;
import static org.opensearch.ml.processor.MLInferenceSearchResponseProcessor.ONE_TO_ONE;
import static org.opensearch.ml.processor.MLInferenceSearchResponseProcessor.TYPE;

import java.util.ArrayList;
Expand Down Expand Up @@ -60,6 +61,7 @@
import org.opensearch.search.SearchHits;
import org.opensearch.search.SearchModule;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.search.internal.InternalSearchResponse;
import org.opensearch.search.pipeline.PipelineProcessingContext;
import org.opensearch.test.AbstractBuilderTestCase;

Expand Down Expand Up @@ -503,6 +505,81 @@ public void onFailure(Exception e) {
verify(client, times(1)).execute(any(), any(), any());
}

/**
* Tests the successful processing of a response with a single pair of input and output mappings.
* read the query text into model config
* with query extensions
* @throws Exception if an error occurs during the test
*/
public void testProcessResponseSuccessWriteToExt() throws Exception {
mingshl marked this conversation as resolved.
Show resolved Hide resolved
String documentField = "text";
String modelInputField = "context";
List<Map<String, String>> inputMap = new ArrayList<>();
Map<String, String> input = new HashMap<>();
input.put(modelInputField, documentField);
inputMap.add(input);

String newDocumentField = "ext.ml_inference.llm_response";
String modelOutputField = "response";
List<Map<String, String>> outputMap = new ArrayList<>();
Map<String, String> output = new HashMap<>();
output.put(newDocumentField, modelOutputField);
outputMap.add(output);
Map<String, String> modelConfig = new HashMap<>();
modelConfig
.put(
"prompt",
"\\n\\nHuman: You are a professional data analyst. You will always answer question based on the given context first. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say I don't know. Context: ${parameters.context}. \\n\\n Human: please summarize the documents \\n\\n Assistant:"
);
MLInferenceSearchResponseProcessor responseProcessor = new MLInferenceSearchResponseProcessor(
"model1",
inputMap,
outputMap,
modelConfig,
DEFAULT_MAX_PREDICTION_TASKS,
PROCESSOR_TAG,
DESCRIPTION,
false,
"remote",
false,
false,
false,
"{ \"parameters\": ${ml_inference.parameters} }",
client,
TEST_XCONTENT_REGISTRY_FOR_QUERY,
false
);

SearchRequest request = getSearchRequest();
String fieldName = "text";
SearchResponse response = getSearchResponse(5, true, fieldName);

ModelTensor modelTensor = ModelTensor.builder().dataAsMap(ImmutableMap.of("response", "there is 1 value")).build();
ModelTensors modelTensors = ModelTensors.builder().mlModelTensors(Arrays.asList(modelTensor)).build();
ModelTensorOutput mlModelTensorOutput = ModelTensorOutput.builder().mlModelOutputs(Arrays.asList(modelTensors)).build();

doAnswer(invocation -> {
ActionListener<MLTaskResponse> actionListener = invocation.getArgument(2);
actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build());
return null;
}).when(client).execute(any(), any(), any());

ActionListener<SearchResponse> listener = new ActionListener<>() {
@Override
public void onResponse(SearchResponse newSearchResponse) {
assertEquals(newSearchResponse.getHits().getHits().length, 5);
}

@Override
public void onFailure(Exception e) {
throw new RuntimeException(e);
}

};
responseProcessor.processResponseAsync(request, response, responseContext, listener);
verify(client, times(1)).execute(any(), any(), any());
}

/**
* Tests create processor with one_to_one is true
* with no mapping provided
Expand Down Expand Up @@ -978,14 +1055,18 @@ public void testProcessResponseCreateRewriteResponseListenerExceptionIgnoreFailu

SearchResponse mockResponse = mock(SearchResponse.class);
SearchHits searchHits = response.getHits();

InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, null, false, null, 1);
when(mockResponse.getInternalResponse()).thenReturn(internalSearchResponse);

RuntimeException mockException = new RuntimeException("Mock exception");
AtomicInteger callCount = new AtomicInteger(0);
;

when(mockResponse.getHits()).thenAnswer(invocation -> {

int count = callCount.getAndIncrement();

if (count == 2) {
if (count == 6) {
mingshl marked this conversation as resolved.
Show resolved Hide resolved
// throw exception when it reaches createRewriteResponseListener
throw mockException;
} else {
Expand All @@ -1011,7 +1092,7 @@ public void onFailure(Exception e) {
}

/**
* Tests create processor with one_to_one is true
* Tests create processor with one_to_one is false
* with output_maps
* createRewriteResponseListener throw Exceptions
* expect to run one prediction task
Expand Down Expand Up @@ -1066,7 +1147,10 @@ public void testProcessResponseCreateRewriteResponseListenerException() throws E
SearchHits searchHits = response.getHits();
RuntimeException mockException = new RuntimeException("Mock exception");
AtomicInteger callCount = new AtomicInteger(0);
;

InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, null, false, null, 1);
when(mockResponse.getInternalResponse()).thenReturn(internalSearchResponse);

when(mockResponse.getHits()).thenAnswer(invocation -> {

int count = callCount.getAndIncrement();
Expand Down Expand Up @@ -3538,7 +3622,7 @@ public void testOutputMapsExceedInputMaps() throws Exception {
output2.put("hashtag_embedding", "response");
outputMap.add(output2);
Map<String, String> output3 = new HashMap<>();
output2.put("hashtvg_embedding", "response");
output3.put("hashtvg_embedding", "response");
outputMap.add(output3);
config.put(OUTPUT_MAP, outputMap);
config.put(MAX_PREDICTION_TASKS, 2);
Expand Down Expand Up @@ -3587,4 +3671,40 @@ public void testCreateOptionalFields() throws Exception {
assertEquals(MLInferenceSearchResponseProcessor.getTag(), processorTag);
assertEquals(MLInferenceSearchResponseProcessor.getType(), MLInferenceSearchResponseProcessor.TYPE);
}

/**
 * Tests that configuring an output map which writes to the search extension
 * ("ext.ml_inference.*") together with one_to_one=true is rejected with an
 * IllegalArgumentException at processor creation time.
 *
 * @throws Exception if an error occurs during the test
 */
public void testWriteToExtensionAndOneToOne() throws Exception {
    Map<String, Object> config = new HashMap<>();
    config.put(MODEL_ID, "model2");
    List<Map<String, String>> inputMap = new ArrayList<>();
    Map<String, String> input0 = new HashMap<>();
    input0.put("inputs", "text");
    inputMap.add(input0);
    Map<String, String> input1 = new HashMap<>();
    input1.put("inputs", "hashtag");
    inputMap.add(input1);
    config.put(INPUT_MAP, inputMap);
    List<Map<String, String>> outputMap = new ArrayList<>();
    Map<String, String> output1 = new HashMap<>();
    output1.put("text_embedding", "response");
    outputMap.add(output1);
    Map<String, String> output2 = new HashMap<>();
    // key must start with EXTENSION_PREFIX ("ext.ml_inference") to count as an
    // extension target; "ext.inference...." would silently not trigger the check
    output2.put("ext.ml_inference.hashtag_embedding", "response");
    outputMap.add(output2);
    config.put(OUTPUT_MAP, outputMap);
    config.put(ONE_TO_ONE, true);
    String processorTag = randomAlphaOfLength(10);

    try {
        factory.create(Collections.emptyMap(), processorTag, null, false, config, null);
        // creation must not succeed; without this the test passes vacuously
        fail("expected IllegalArgumentException when writing to extension with one_to_one=true");
    } catch (IllegalArgumentException e) {
        assertEquals("Writing model response to search extension does not support when one_to_one is true.", e.getMessage());
    }
}
}
Loading