Support relevant form toggling in advanced transform modals (opensearch-project#385)

* Support one-to-one on input

Signed-off-by: Tyler Ohlsen <[email protected]>

* Dynamic description; add field; add input docs limit

Signed-off-by: Tyler Ohlsen <[email protected]>

* Expose one_to_one on input transform (search response)

Signed-off-by: Tyler Ohlsen <[email protected]>

* Onboard full_response_path for output (ingest, search response)

Signed-off-by: Tyler Ohlsen <[email protected]>

---------

Signed-off-by: Tyler Ohlsen <[email protected]>
ohltyler authored Sep 17, 2024
1 parent 68a152d commit 4e2445d
Showing 6 changed files with 116 additions and 28 deletions.
2 changes: 2 additions & 0 deletions common/constants.ts
@@ -155,6 +155,8 @@ export enum COMPONENT_CLASS {
*/
export const ML_INFERENCE_DOCS_LINK =
'https://opensearch.org/docs/latest/ingest-pipelines/processors/ml-inference/#configuration-parameters';
export const ML_INFERENCE_RESPONSE_DOCS_LINK =
'https://opensearch.org/docs/latest/search-plugins/search-pipelines/ml-inference-search-response/#request-fields';
export const ML_CHOOSE_MODEL_LINK =
'https://opensearch.org/docs/latest/ml-commons-plugin/integrating-ml-models/#choosing-a-model';
export const TEXT_CHUNKING_PROCESSOR_LINK =

2 changes: 1 addition & 1 deletion common/utils.ts
@@ -36,7 +36,7 @@ export function getCharacterLimitedString(
: '';
}

export function customStringify(jsonObj: {}): string {
export function customStringify(jsonObj: {} | []): string {
return JSON.stringify(jsonObj, undefined, 2);
}
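For reference, a short usage sketch of the widened signature (illustrative values, not from this PR; assumes customStringify is imported from common/utils). The function now accepts an array of documents as well as a single object and pretty-prints either with 2-space indentation.

// Illustrative only -- not part of the diff.
const docs = [{ title: 'doc one' }, { title: 'doc two' }];
const single = customStringify(docs[0]); // '{\n  "title": "doc one"\n}'
const many = customStringify(docs); // pretty-printed JSON array containing both documents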

@@ -14,11 +14,15 @@ export class MLSearchResponseProcessor extends MLProcessor {
super();
this.id = generateId('ml_processor_search_response');
this.optionalFields = [
...(this.optionalFields || []),
{
id: 'one_to_one',
type: 'boolean',
},
...(this.optionalFields || []),
{
id: 'override',
type: 'boolean',
},
];
}
}

@@ -34,6 +34,7 @@ import {
IngestPipelineConfig,
JSONPATH_ROOT_SELECTOR,
ML_INFERENCE_DOCS_LINK,
ML_INFERENCE_RESPONSE_DOCS_LINK,
MapArrayFormValue,
ModelInterface,
PROCESSOR_CONTEXT,
@@ -60,7 +61,7 @@ import {
parseModelInputs,
parseModelInputsObj,
} from '../../../../utils/utils';
import { MapArrayField } from '../input_fields';
import { BooleanField, MapArrayField } from '../input_fields';

interface InputTransformModalProps {
uiConfig: WorkflowConfig;
@@ -74,6 +75,9 @@ interface InputTransformModalProps {
onClose: () => void;
}

// the max number of input docs we use to display & test transforms with
const MAX_INPUT_DOCS = 10;

/**
* A modal to configure advanced JSON-to-JSON transforms into a model's expected input
*/
@@ -94,11 +98,13 @@ export function InputTransformModal(props: InputTransformModalProps) {
const [isFetching, setIsFetching] = useState<boolean>(false);

// source input / transformed input state
const [sourceInput, setSourceInput] = useState<string>('[]');
const [sourceInput, setSourceInput] = useState<string>('{}');
const [transformedInput, setTransformedInput] = useState<string>('{}');

// get the current input map
// get some current form values
const map = getIn(values, props.inputMapFieldPath) as MapArrayFormValue;
const oneToOnePath = `${props.baseConfigPath}.${props.config.id}.one_to_one`;
const oneToOne = getIn(values, oneToOnePath);

// selected transform state
const transformOptions = map.map((_, idx) => ({
@@ -116,6 +122,11 @@ export function InputTransformModal(props: InputTransformModalProps) {
// there is no model interface and/or no source input
const [isValid, setIsValid] = useState<boolean | undefined>(undefined);

const description =
props.context === PROCESSOR_CONTEXT.SEARCH_REQUEST
? 'Fetch an input query and see how it is transformed.'
: `Fetch some sample documents (up to ${MAX_INPUT_DOCS}) and see how they are transformed.`;

// hook to re-generate the transform when any inputs to the transform are updated
useEffect(() => {
if (
@@ -132,6 +143,8 @@ export function InputTransformModal(props: InputTransformModalProps) {
);
setTransformedInput(customStringify(output));
} catch {}
} else {
setTransformedInput('{}');
}
}, [map, sourceInput, selectedTransformOption]);

@@ -185,6 +198,11 @@ }
}
}, [originalPrompt, transformedInput]);

// hook to clear the source input when one_to_one is toggled
useEffect(() => {
setSourceInput('{}');
}, [oneToOne]);

return (
<EuiModal onClose={props.onClose} style={{ width: '70vw' }}>
<EuiModalHeader>
@@ -196,10 +214,28 @@ export function InputTransformModal(props: InputTransformModalProps) {
<EuiFlexGroup direction="column">
<EuiFlexItem>
<>
<EuiText color="subdued">
Fetch some sample input data and see how it is transformed.
</EuiText>
<EuiText color="subdued">{description}</EuiText>
<EuiSpacer size="s" />
{props.context === PROCESSOR_CONTEXT.SEARCH_RESPONSE && (
<>
<BooleanField
label={'One-to-one'}
fieldPath={oneToOnePath}
enabledOption={{
id: `${oneToOnePath}_true`,
label: 'True',
}}
disabledOption={{
id: `${oneToOnePath}_false`,
label: 'False',
}}
showLabel={true}
helpLink={ML_INFERENCE_RESPONSE_DOCS_LINK}
helpText="Run inference for each document separately"
/>
<EuiSpacer size="s" />
</>
)}
<EuiText>Source input</EuiText>
<EuiSmallButton
style={{ width: '100px' }}
@@ -306,11 +342,15 @@ export function InputTransformModal(props: InputTransformModalProps) {
)
.unwrap()
.then(async (resp) => {
const hits = resp.hits.hits.map(
(hit: SearchHit) => hit._source
);
const hits = resp.hits.hits
.map((hit: SearchHit) => hit._source)
.slice(0, MAX_INPUT_DOCS);
if (hits.length > 0) {
setSourceInput(customStringify(hits[0]));
setSourceInput(
// if one-to-one, treat the source input as a single retrieved document
// else, treat it as all of the returned documents
customStringify(oneToOne ? hits[0] : hits)
);
}
})
.catch((error: any) => {
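Putting the pieces above together, a hedged sketch of how the one_to_one toggle changes the previewed source input. The hits are hypothetical, and customStringify comes from common/utils.

// Hypothetical hits already mapped to their _source documents and capped at MAX_INPUT_DOCS.
const hits = [{ item_text: 'shoes' }, { item_text: 'winter boots' }];
const oneToOne = true;

// one_to_one = true  -> preview a single retrieved document (inference runs per document)
// one_to_one = false -> preview the full capped list of returned documents
const preview = customStringify(oneToOne ? hits[0] : hits);

Either way, the hits are sliced to MAX_INPUT_DOCS first, so large result sets do not flood the preview editor.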

@@ -31,6 +31,7 @@ import {
IngestPipelineConfig,
JSONPATH_ROOT_SELECTOR,
ML_INFERENCE_DOCS_LINK,
ML_INFERENCE_RESPONSE_DOCS_LINK,
MapArrayFormValue,
ModelInterface,
PROCESSOR_CONTEXT,
@@ -53,7 +54,7 @@ import {
useAppDispatch,
} from '../../../../store';
import { getCore } from '../../../../services';
import { MapArrayField } from '../input_fields';
import { BooleanField, MapArrayField } from '../input_fields';
import {
getDataSourceId,
parseModelOutputs,
@@ -83,15 +84,13 @@ export function OutputTransformModal(props: OutputTransformModalProps) {
const [isFetching, setIsFetching] = useState<boolean>(false);

// source output / transformed output state
const [sourceOutput, setSourceOutput] = useState<string>('[]');
const [sourceOutput, setSourceOutput] = useState<string>('{}');
const [transformedOutput, setTransformedOutput] = useState<string>('{}');

// get some current values
// get some current form values
const map = getIn(values, props.outputMapFieldPath) as MapArrayFormValue;
const fullResponsePath = getIn(
values,
`${props.baseConfigPath}.${props.config.id}.full_response_path`
);
const fullResponsePathPath = `${props.baseConfigPath}.${props.config.id}.full_response_path`;
const fullResponsePath = getIn(values, fullResponsePathPath);

// popover state containing the model interface details, if applicable
const [popoverOpen, setPopoverOpen] = useState<boolean>(false);
@@ -121,9 +120,16 @@ export function OutputTransformModal(props: OutputTransformModalProps) {
);
setTransformedOutput(customStringify(output));
} catch {}
} else {
setTransformedOutput('{}');
}
}, [map, sourceOutput, selectedTransformOption]);

// hook to clear the source output when full_response_path is toggled
useEffect(() => {
setSourceOutput('{}');
}, [fullResponsePath]);

return (
<EuiModal onClose={props.onClose} style={{ width: '70vw' }}>
<EuiModalHeader>
@@ -139,6 +145,31 @@ export function OutputTransformModal(props: OutputTransformModalProps) {
Fetch some sample output data and see how it is transformed.
</EuiText>
<EuiSpacer size="s" />
{(props.context === PROCESSOR_CONTEXT.INGEST ||
props.context === PROCESSOR_CONTEXT.SEARCH_RESPONSE) && (
<>
<BooleanField
label={'Full response path'}
fieldPath={fullResponsePathPath}
enabledOption={{
id: `${fullResponsePathPath}_true`,
label: 'True',
}}
disabledOption={{
id: `${fullResponsePathPath}_false`,
label: 'False',
}}
showLabel={true}
helpLink={
props.context === PROCESSOR_CONTEXT.INGEST
? ML_INFERENCE_DOCS_LINK
: ML_INFERENCE_RESPONSE_DOCS_LINK
}
helpText="Parse the full model output"
/>
<EuiSpacer size="s" />
</>
)}
<EuiFlexGroup direction="row" justifyContent="spaceBetween">
<EuiFlexItem>
<EuiText>Source output</EuiText>

29 changes: 20 additions & 9 deletions public/utils/utils.ts
@@ -24,6 +24,7 @@ import {
import { getCore, getDataSourceEnabled } from '../services';
import {
MDSQueryParams,
MapEntry,
ModelInputMap,
ModelOutputMap,
} from '../../common/interfaces';
@@ -177,19 +178,17 @@ export function unwrapTransformedDocs(

// ML inference processors will use standard dot notation or JSONPath depending on the input.
// We follow the same logic here to generate consistent results.
export function generateTransform(input: {}, map: MapFormValue): {} {
// Collapse the values depending on if the input is an array or not.
export function generateTransform(input: {} | [], map: MapFormValue): {} | [] {
let output = {};
map.forEach((mapEntry) => {
const path = mapEntry.value;
try {
let transformedResult = undefined;
if (mapEntry.value.startsWith(JSONPATH_ROOT_SELECTOR)) {
// JSONPath transform
transformedResult = jsonpath.query(input, path);
// Standard dot notation
} else {
transformedResult = get(input, path);
}
const transformedResult = Array.isArray(input)
? input.map((inputEntry) =>
getTransformedResult(mapEntry, inputEntry, path)
)
: getTransformedResult(mapEntry, input, path);
output = {
...output,
[mapEntry.key]: transformedResult || '',
@@ -199,6 +198,18 @@ export function generateTransform(input: {}, map: MapFormValue): {} {
return output;
}

function getTransformedResult(
mapEntry: MapEntry,
input: {},
path: string
): any {
return mapEntry.value.startsWith(JSONPATH_ROOT_SELECTOR)
? // JSONPath transform
jsonpath.query(input, path)
: // Standard dot notation
get(input, path);
}
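A brief usage sketch of the array-aware transform (illustrative documents and map entry; MapFormValue entries are assumed to be the key/value pairs used throughout this file).

// Single-object input: the mapped value is resolved once via dot notation.
generateTransform({ description: 'hello world' }, [
  { key: 'text_docs', value: 'description' },
] as MapFormValue);
// => { text_docs: 'hello world' }

// Array input (e.g. several source documents): the value is resolved per entry,
// so the mapped model input becomes an array of resolved values.
generateTransform(
  [{ description: 'doc one' }, { description: 'doc two' }],
  [{ key: 'text_docs', value: 'description' }] as MapFormValue
);
// => { text_docs: ['doc one', 'doc two'] }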

// Derive the collection of model inputs from the model interface JSONSchema into a form-ready list
export function parseModelInputs(
modelInterface: ModelInterface | undefined
