Support inserting template vars at cursor position; improve RAG preset (#526) (#527)

* Improve RAG preset prompt defaults

* Change prompt vars to insert at editor cursor position (see sketch below)

* Fix spacing in configure prompt modal

* Improve default empty values for input/output map transforms

* Handle NPE; add check to block showing resources for custom workflow types

* Add check to handle empty or missing ui_metadata

* Fix unit test

---------

(cherry picked from commit b613ce3)

Signed-off-by: Tyler Ohlsen <[email protected]>
Signed-off-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
1 parent 0ec1713 commit 39fdb1a
Showing 7 changed files with 213 additions and 85 deletions.
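
The core change is inserting a template-variable placeholder at the prompt editor's cursor instead of copying it to the clipboard. Below is a minimal standalone sketch of that approach; the helper name insertPlaceholderAtCursor is hypothetical, and the real handler appears in the ConfigureTemplateModal diff further down. It assumes the brace/ace editor instance is reachable via the rendered element's env.editor property, which is how the commit accesses it.

import { get, isEmpty } from 'lodash';

// ID set on the EuiFlexItem that wraps the EuiCodeEditor (see PROMPT_EDITOR_ID in the diff below).
const PROMPT_EDITOR_ID = 'promptEditor';

// Hypothetical helper mirroring the onClick handler added in this commit.
function insertPlaceholderAtCursor(placeholder: string): void {
  // EuiCodeEditor renders a child element with the 'ace_editor' class; the
  // underlying ace instance hangs off element.env.editor.
  const editorElements = document
    .getElementById(PROMPT_EDITOR_ID)
    ?.getElementsByClassName('ace_editor');
  const editor = get(editorElements, '0.env.editor');
  const session = editor?.session;
  const cursorPosition = editor?.getCursorPosition();
  if (session !== undefined && cursorPosition !== undefined && !isEmpty(placeholder)) {
    // Insert the placeholder (e.g. '${parameters.results.toString()}') at the cursor.
    session.insert(cursorPosition, placeholder);
  } else {
    console.error('Value could not be inserted');
  }
}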
33 changes: 26 additions & 7 deletions common/constants.ts
@@ -426,6 +426,12 @@ export const QUERY_PRESETS = [
   },
 ] as QueryPreset[];
 
+/**
+ * DEFAULT TEMPLATE VAR NAMES
+ */
+export const DEFAULT_PROMPT_RESULTS_FIELD = 'results';
+export const DEFAULT_PROMPT_QUESTION_FIELD = 'question';
+
 /**
  * PROMPT PRESETS
  */
@@ -434,19 +440,25 @@ export const SUMMARIZE_DOCS_PROMPT =
 You are given a list of document results. You will \
 analyze the data and generate a human-readable summary of the results. If you don't \
 know the answer, just say I don't know.\
-\n\n Results: <provide some results> \
+\n\n Results: ${parameters." +
+  DEFAULT_PROMPT_RESULTS_FIELD +
+  '.toString()} \
 \n\n Human: Please summarize the results.\
-\n\n Assistant:";
+\n\n Assistant:';
 
 export const QA_WITH_DOCUMENTS_PROMPT =
   "Human: You are a professional data analyst. \
 You are given a list of document results, along with a question. You will \
 analyze the results and generate a human-readable response to the question, \
 based on the results. If you don't know the answer, just say I don't know.\
-\n\n Results: <provide some results> \
-\n\n Question: <provide some question> \
+\n\n Results: ${parameters." +
+  DEFAULT_PROMPT_RESULTS_FIELD +
+  '.toString()} \
+\n\n Question: ${parameters.' +
+  DEFAULT_PROMPT_QUESTION_FIELD +
+  '.toString()} \
 \n\n Human: Please answer the question using the provided results.\
-\n\n Assistant:";
+\n\n Assistant:';
 
 export const PROMPT_PRESETS = [
   {
@@ -507,11 +519,18 @@ export const EMPTY_MAP_ENTRY = { key: '', value: '' } as MapEntry;
 export const EMPTY_INPUT_MAP_ENTRY = {
   key: '',
   value: {
-    transformType: '' as TRANSFORM_TYPE,
+    transformType: TRANSFORM_TYPE.FIELD,
     value: '',
   },
 } as InputMapEntry;
-export const EMPTY_OUTPUT_MAP_ENTRY = EMPTY_INPUT_MAP_ENTRY;
+
+export const EMPTY_OUTPUT_MAP_ENTRY = {
+  ...EMPTY_INPUT_MAP_ENTRY,
+  value: {
+    ...EMPTY_INPUT_MAP_ENTRY.value,
+    transformType: NO_TRANSFORMATION as TRANSFORM_TYPE,
+  },
+};
 export const MODEL_OUTPUT_SCHEMA_NESTED_PATH =
   'output.properties.inference_results.items.properties.output.items.properties.dataAsMap.properties';
 export const MODEL_OUTPUT_SCHEMA_FULL_PATH = 'output.properties';
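
For reference, the new preset wiring builds the placeholder embedded in the prompt strings from the shared constant, so the variable name only has to be defined once. A rough illustration of what the concatenation evaluates to; the later substitution of ${parameters.results} with real search hits is assumed to happen downstream in the ML inference processor, not in this file:

const DEFAULT_PROMPT_RESULTS_FIELD = 'results';

// The string concatenation in the preset above produces a placeholder like this:
const resultsPlaceholder =
  '${parameters.' + DEFAULT_PROMPT_RESULTS_FIELD + '.toString()}';
console.log(resultsPlaceholder); // "${parameters.results.toString()}"

// So the summarization preset ends up containing, roughly:
//   "... \n\n Results: ${parameters.results.toString()} \n\n Human: Please summarize the results. \n\n Assistant:"
// and the question/answer preset additionally embeds ${parameters.question.toString()}.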
9 changes: 2 additions & 7 deletions common/interfaces.ts
@@ -365,7 +365,7 @@ export type WorkflowTemplate = {
   // https://github.com/opensearch-project/flow-framework/issues/526
   version?: any;
   workflows?: TemplateFlows;
-  use_case?: USE_CASE;
+  use_case?: string;
   // UI state and any ReactFlow state may not exist if a workflow is created via API/backend-only.
   ui_metadata?: UIState;
 };
@@ -386,12 +386,6 @@ export type Workflow = WorkflowTemplate & {
   resourcesCreated?: WorkflowResource[];
 };
 
-export enum USE_CASE {
-  SEMANTIC_SEARCH = 'SEMANTIC_SEARCH',
-  NEURAL_SPARSE_SEARCH = 'NEURAL_SPARSE_SEARCH',
-  HYBRID_SEARCH = 'HYBRID_SEARCH',
-}
-
 /**
  ********** ML PLUGIN TYPES/INTERFACES **********
  */
@@ -554,6 +548,7 @@ export type QuickConfigureFields = {
   textField?: string;
   imageField?: string;
   labelField?: string;
+  promptField?: string;
   embeddingLength?: number;
   llmResponseField?: string;
 };
Changed file: the ConfigureTemplateModal component (exact file path not shown in this view).
@@ -5,7 +5,7 @@
 
 import React, { useEffect, useState } from 'react';
 import { useFormikContext, getIn, Formik } from 'formik';
-import { isEmpty } from 'lodash';
+import { get, isEmpty } from 'lodash';
 import * as yup from 'yup';
 import {
   EuiCodeEditor,
@@ -23,7 +23,6 @@ import {
   EuiSmallButtonEmpty,
   EuiSmallButtonIcon,
   EuiSpacer,
-  EuiCopy,
   EuiIconTip,
 } from '@elastic/eui';
 import {
@@ -80,6 +79,10 @@ const VALUE_FLEX_RATIO = 6;
 // the max number of input docs we use to display & test transforms with (search response hits)
 const MAX_INPUT_DOCS = 10;
 
+// the prompt editor element ID. Used when fetching the element to perform functions on the
+// underlying ace editor (inserting template variables at the cursor position)
+const PROMPT_EDITOR_ID = 'promptEditor';
+
 /**
  * A modal to configure a prompt template. Can manually configure, include placeholder values
  * using other model inputs, and/or select from a presets library. Used for configuring model
@@ -298,12 +301,12 @@ export function ConfigureTemplateModal(props: ConfigureTemplateModalProps) {
       <EuiFlexItem grow={false}>
         <EuiFlexGroup
           direction="row"
-          justifyContent="spaceAround"
+          justifyContent="spaceBetween"
         >
-          <EuiFlexItem>
+          <EuiFlexItem grow={false}>
             <EuiText size="m">Prompt</EuiText>
           </EuiFlexItem>
-          <EuiFlexItem>
+          <EuiFlexItem grow={false}>
             <EuiPopover
               button={
                 <EuiSmallButton
@@ -352,7 +355,7 @@ export function ConfigureTemplateModal(props: ConfigureTemplateModalProps) {
           </EuiFlexGroup>
         </EuiFlexItem>
         <EuiSpacer size="s" />
-        <EuiFlexItem grow={false}>
+        <EuiFlexItem grow={false} id={PROMPT_EDITOR_ID}>
           <EuiCodeEditor
             mode="json"
             theme="textmate"
@@ -386,7 +389,7 @@ export function ConfigureTemplateModal(props: ConfigureTemplateModalProps) {
         <EuiFlexItem grow={false}>
           <EuiIconTip
             content={`Define input variables with JSONPath to extract out source data.
-              Inject into the prompt by clicking the "Copy" button and pasting into the prompt.`}
+              Insert into the prompt by clicking the "Insert" button.`}
             position="right"
           />
         </EuiFlexItem>
@@ -443,38 +446,51 @@ export function ConfigureTemplateModal(props: ConfigureTemplateModalProps) {
           />
         </EuiFlexItem>
         <EuiFlexItem grow={false}>
-          <EuiCopy
-            textToCopy={getPlaceholderString(
+          <EuiSmallButtonEmpty
+            disabled={isEmpty(
               getIn(
                 formikProps.values,
-                `nestedVars.${idx}.name`
+                `nestedVars.${idx}.transform`
               )
             )}
+            onClick={() => {
+              const promptEditorParentElement = document
+                .getElementById(PROMPT_EDITOR_ID)
+                ?.getElementsByClassName(
+                  'ace_editor'
+                );
+              const promptEditor = get(
+                promptEditorParentElement,
+                '0.env.editor'
+              );
+              const promptEditorSession =
+                promptEditor?.session;
+              const cursorPosition = promptEditor?.getCursorPosition();
+              const valueToInsert = getPlaceholderString(
+                getIn(
+                  formikProps.values,
+                  `nestedVars.${idx}.name`
+                )
+              );
+              if (
+                promptEditorSession !== undefined &&
+                cursorPosition !== undefined &&
+                valueToInsert !== undefined &&
+                !isEmpty(valueToInsert)
+              ) {
+                promptEditorSession.insert(
+                  cursorPosition,
+                  valueToInsert
+                );
+              } else {
+                console.error(
+                  'Value could not be inserted'
+                );
+              }
+            }}
           >
-            {(copy) => (
-              <EuiSmallButtonIcon
-                aria-label="Copy"
-                iconType="copy"
-                disabled={isEmpty(
-                  getIn(
-                    formikProps.values,
-                    `nestedVars.${idx}.transform`
-                  )
-                )}
-                color={
-                  isEmpty(
-                    getIn(
-                      formikProps.values,
-                      `nestedVars.${idx}.transform`
-                    )
-                  )
-                    ? 'subdued'
-                    : 'primary'
-                }
-                onClick={copy}
-              />
-            )}
-          </EuiCopy>
+            Insert
+          </EuiSmallButtonEmpty>
         </EuiFlexItem>
         <EuiFlexItem grow={false}>
           <EuiSmallButtonIcon
@@ -518,12 +534,12 @@ export function ConfigureTemplateModal(props: ConfigureTemplateModalProps) {
       <EuiFlexItem grow={false}>
         <EuiFlexGroup
           direction="row"
-          justifyContent="spaceAround"
+          justifyContent="spaceBetween"
         >
-          <EuiFlexItem>
+          <EuiFlexItem grow={false}>
             <EuiText size="m">Prompt preview</EuiText>
           </EuiFlexItem>
-          <EuiFlexItem>
+          <EuiFlexItem grow={false}>
             <EuiSmallButton
               style={{ width: '100px' }}
               isLoading={isFetching}
97 changes: 81 additions & 16 deletions public/pages/workflows/new_workflow/quick_configure_inputs.tsx
@@ -25,11 +25,14 @@ import {
   DEFAULT_VECTOR_FIELD,
   MODEL_STATE,
   Model,
+  ModelInterface,
   OPENAI_DIMENSIONS,
   QuickConfigureFields,
   WORKFLOW_TYPE,
 } from '../../../../common';
 import { AppState } from '../../../store';
+import { parseModelInputs } from '../../../utils';
+import { get } from 'lodash';
 
 interface QuickConfigureInputsProps {
   workflowType?: WORKFLOW_TYPE;
@@ -44,6 +47,11 @@ export function QuickConfigureInputs(props: QuickConfigureInputsProps) {
   // Deployed models state
   const [deployedModels, setDeployedModels] = useState<Model[]>([]);
 
+  // Selected model interface state
+  const [selectedModelInterface, setSelectedModelInterface] = useState<
+    ModelInterface | undefined
+  >(undefined);
+
   // Hook to update available deployed models
   useEffect(() => {
     if (models) {
@@ -89,6 +97,7 @@ export function QuickConfigureInputs(props: QuickConfigureInputsProps) {
       case WORKFLOW_TYPE.RAG: {
         defaultFieldValues = {
           textField: DEFAULT_TEXT_FIELD,
+          promptField: '',
           llmResponseField: DEFAULT_LLM_RESPONSE_FIELD,
         };
         break;
@@ -116,6 +125,7 @@ export function QuickConfigureInputs(props: QuickConfigureInputsProps) {
     const selectedModel = deployedModels.find(
       (model) => model.id === fieldValues.modelId
     );
+    setSelectedModelInterface(selectedModel?.interface);
     if (selectedModel?.connectorId !== undefined) {
       const connector = connectors[selectedModel.connectorId];
       if (connector !== undefined) {
@@ -150,6 +160,19 @@ export function QuickConfigureInputs(props: QuickConfigureInputsProps) {
       }
     }
   }, [fieldValues.modelId, deployedModels, connectors]);
 
+  // When the model interface is defined, set a default prompt field, if applicable.
+  useEffect(() => {
+    if (
+      props.workflowType === WORKFLOW_TYPE.RAG &&
+      selectedModelInterface !== undefined
+    ) {
+      setFieldValues({
+        ...fieldValues,
+        promptField: get(parseModelInputs(selectedModelInterface), '0.label'),
+      });
+    }
+  }, [selectedModelInterface]);
+
   return (
     <>
       {props.workflowType !== WORKFLOW_TYPE.CUSTOM ? (
@@ -325,23 +348,65 @@ export function QuickConfigureInputs(props: QuickConfigureInputsProps) {
         </EuiCompressedFormRow>
       )}
       {props.workflowType === WORKFLOW_TYPE.RAG && (
-        <EuiCompressedFormRow
-          fullWidth={true}
-          label={'LLM response field'}
-          isInvalid={false}
-          helpText="The name of the field containing the large language model (LLM) response"
-        >
-          <EuiCompressedFieldText
+        <>
+          <EuiCompressedFormRow
             fullWidth={true}
-            value={fieldValues?.llmResponseField || ''}
-            onChange={(e) => {
-              setFieldValues({
-                ...fieldValues,
-                llmResponseField: e.target.value,
-              });
-            }}
-          />
-        </EuiCompressedFormRow>
+            label={'Prompt field'}
+            isInvalid={false}
+            helpText={'The model input field representing the prompt'}
+          >
+            <EuiCompressedSuperSelect
+              data-testid="selectPromptField"
+              fullWidth={true}
+              options={parseModelInputs(selectedModelInterface).map(
+                (option) =>
+                  ({
+                    value: option.label,
+                    inputDisplay: (
+                      <>
+                        <EuiText size="s">{option.label}</EuiText>
+                      </>
+                    ),
+                    dropdownDisplay: (
+                      <>
+                        <EuiText size="s">{option.label}</EuiText>
+                        <EuiText size="xs" color="subdued">
+                          {option.type}
+                        </EuiText>
+                      </>
+                    ),
+                    disabled: false,
+                  } as EuiSuperSelectOption<string>)
+              )}
+              valueOfSelected={fieldValues?.promptField || ''}
+              onChange={(option: string) => {
+                setFieldValues({
+                  ...fieldValues,
+                  promptField: option,
+                });
+              }}
+              isInvalid={false}
+            />
+          </EuiCompressedFormRow>
+          <EuiSpacer size="s" />
+          <EuiCompressedFormRow
+            fullWidth={true}
+            label={'LLM response field'}
+            isInvalid={false}
+            helpText="The name of the field containing the large language model (LLM) response"
+          >
+            <EuiCompressedFieldText
+              fullWidth={true}
+              value={fieldValues?.llmResponseField || ''}
+              onChange={(e) => {
+                setFieldValues({
+                  ...fieldValues,
+                  llmResponseField: e.target.value,
+                });
+              }}
+            />
+          </EuiCompressedFormRow>
+        </>
       )}
     </EuiAccordion>
   </>
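
The default prompt field is derived from the selected model's interface: roughly, the label of the first declared model input is used. A small sketch of that choice, under the assumption that parseModelInputs returns an array of { label, type } entries, which is how the dropdown options above consume it:

import { get } from 'lodash';

// Assumed shape of one entry returned by the plugin's parseModelInputs helper.
interface ModelInputOption {
  label: string;
  type?: string;
}

// Mirrors `get(parseModelInputs(selectedModelInterface), '0.label')`:
// pick the first declared input's label, or undefined when the model
// interface declares no inputs (leaving the prompt field unset).
function defaultPromptField(modelInputs: ModelInputOption[]): string | undefined {
  return get(modelInputs, '0.label');
}

console.log(defaultPromptField([{ label: 'prompt', type: 'string' }])); // "prompt"
console.log(defaultPromptField([])); // undefined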