Remove hardcoded preconfigured ELSER endpoint (elastic#201300)
## Summary

The index mapping will have access to the default ELSER inference endpoint, so we no longer need to hardcode endpoint names in Kibana.

This needs to go in after elastic/elasticsearch#117294 merges in `8.17` and later.
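
To illustrate the idea (a minimal sketch, not code from this PR; the field shape is simplified and the sample value is the constant this change removes): once the get-mapping response carries the default `inference_id` for `semantic_text` fields, the UI can read it directly and detect preconfigured endpoints by their leading dot, rather than falling back to a hardcoded constant.

```ts
// Minimal sketch (illustrative only): a semantic_text property as returned by
// the get-mapping API, which now always carries an inference_id.
interface SemanticTextProperty {
  type: 'semantic_text';
  inference_id: string;
}

// Preconfigured endpoints are identified by their leading dot, as in the
// isInferencePreconfigured helper used in the index error component.
const isInferencePreconfigured = (inferenceId: string): boolean =>
  inferenceId.startsWith('.');

// Example value: the default ELSER endpoint name that was previously hardcoded.
const field: SemanticTextProperty = {
  type: 'semantic_text',
  inference_id: '.elser-2-elasticsearch',
};

console.log(isInferencePreconfigured(field.inference_id)); // true
```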


https://github.com/user-attachments/assets/4a786fde-e250-440d-a9d7-2256dacc8edd

---------

Co-authored-by: Elastic Machine <[email protected]>
Samiul-TheSoccerFan and elasticmachine authored Nov 26, 2024
1 parent c7b5b35 commit b3d638b
Showing 11 changed files with 55 additions and 92 deletions.
@@ -31,15 +31,10 @@ export interface IndexErrorProps {
}

interface SemanticTextProperty extends MappingPropertyBase {
inference_id?: string;
inference_id: string;
type: 'semantic_text';
}

/*
This will be repalce once we add default elser inference_id
with the index mapping response.
*/
const ELSER_PRECONFIGURED_ENDPOINTS = '.elser-2-elasticsearch';
const isInferencePreconfigured = (inferenceId: string) => inferenceId.startsWith('.');

const parseMapping = (mappings: MappingTypeMapping) => {
@@ -56,11 +51,6 @@ const getSemanticTextFields = (
): Array<{ path: string; source: SemanticTextProperty }> => {
return Object.entries(fields).flatMap(([key, value]) => {
const currentPath: string = path ? `${path}.${key}` : key;
if (value.type === 'semantic_text') {
value = value.inference_id
? value
: { ...value, inference_id: ELSER_PRECONFIGURED_ENDPOINTS };
}
const currentField: Array<{ path: string; source: SemanticTextProperty }> =
value.type === 'semantic_text' ? [{ path: currentPath, source: value }] : [];
if (hasProperties(value)) {
@@ -9,7 +9,7 @@ import {
Form,
useForm,
} from '../../../public/application/components/mappings_editor/shared_imports';
import { registerTestBed } from '@kbn/test-jest-helpers';
import { findTestSubject, registerTestBed } from '@kbn/test-jest-helpers';
import { act } from 'react-dom/test-utils';
import {
SelectInferenceId,
@@ -152,4 +152,34 @@ describe('SelectInferenceId', () => {
expect(find('data-inference-endpoint-list').contains('endpoint-2')).toBe(true);
expect(find('data-inference-endpoint-list').contains('endpoint-3')).toBe(false);
});

it('select the first endpoint by default', () => {
find('inferenceIdButton').simulate('click');
const defaultElser = findTestSubject(
find('data-inference-endpoint-list'),
'custom-inference_.preconfigured-elser'
);
expect(defaultElser.prop('aria-checked')).toEqual(true);
});

it('does not select the other endpoints by default', () => {
find('inferenceIdButton').simulate('click');
const defaultE5 = findTestSubject(
find('data-inference-endpoint-list'),
'custom-inference_.preconfigured-e5'
);
expect(defaultE5.prop('aria-checked')).toEqual(false);

const endpoint1 = findTestSubject(
find('data-inference-endpoint-list'),
'custom-inference_endpoint-1'
);
expect(endpoint1.prop('aria-checked')).toEqual(false);

const endpoint2 = findTestSubject(
find('data-inference-endpoint-list'),
'custom-inference_endpoint-2'
);
expect(endpoint2.prop('aria-checked')).toEqual(false);
});
});
@@ -132,6 +132,14 @@ const SelectInferenceIdContent: React.FC<SelectInferenceIdContentProps> = ({
'data-test-subj': `custom-inference_${endpoint.inference_id}`,
checked: value === endpoint.inference_id ? 'on' : undefined,
}));
/**
* Adding this check to ensure we have the preconfigured elser endpoint selected by default.
*/
const hasInferenceSelected = newOptions.some((option) => option.checked === 'on');
if (!hasInferenceSelected && newOptions.length > 0) {
newOptions[0].checked = 'on';
}

if (value && !newOptions.find((option) => option.label === value)) {
// Sometimes we create a new endpoint but the backend is slow in updating so we need to optimistically update
const newOption: EuiSelectableOption = {
@@ -273,6 +281,7 @@ const SelectInferenceIdContent: React.FC<SelectInferenceIdContentProps> = ({
searchable
isLoading={isLoading}
singleSelection="always"
defaultChecked
searchProps={{
compressed: true,
placeholder: i18n.translate(
@@ -13,15 +13,15 @@ import { act } from 'react-dom/test-utils';
jest.mock('../../../../../../../../hooks/use_details_page_mappings_model_management', () => ({
useDetailsPageMappingsModelManagement: () => ({
fetchInferenceToModelIdMap: () => ({
e5: {
'.preconfigured_elser': {
isDeployed: false,
isDeployable: true,
trainedModelId: '.multilingual-e5-small',
trainedModelId: '.elser_model_2',
},
elser_model_2: {
'.preconfigured_e5': {
isDeployed: false,
isDeployable: true,
trainedModelId: '.elser_model_2',
trainedModelId: '.multilingual-e5-small',
},
openai: {
isDeployed: false,
@@ -49,13 +49,13 @@ const mockField: Record<string, SemanticTextField> = {
elser_model_2: {
name: 'name',
type: 'semantic_text',
inference_id: 'elser_model_2',
inference_id: '.preconfigured_elser',
reference_field: 'title',
},
e5: {
name: 'name',
type: 'semantic_text',
inference_id: 'e5',
inference_id: '.preconfigured_e5',
reference_field: 'title',
},
openai: {
@@ -100,15 +100,15 @@ const mockDispatch = jest.fn();
jest.mock('../../../../../mappings_state_context', () => ({
useMappingsState: jest.fn().mockReturnValue({
inferenceToModelIdMap: {
e5: {
'.preconfigured_elser': {
isDeployed: false,
isDeployable: true,
trainedModelId: '.multilingual-e5-small',
trainedModelId: '.elser_model_2',
},
elser_model_2: {
'.preconfigured_e5': {
isDeployed: false,
isDeployable: true,
trainedModelId: '.elser_model_2',
trainedModelId: '.multilingual-e5-small',
},
openai: {
isDeployed: false,
@@ -142,7 +142,7 @@ jest.mock('../../../../../../../services/api', () => ({
getInferenceEndpoints: jest.fn().mockResolvedValue({
data: [
{
inference_id: 'e5',
inference_id: '.preconfigured_e5',
task_type: 'text_embedding',
service: 'elasticsearch',
service_settings: {
@@ -212,28 +212,6 @@ describe('useSemanticText', () => {
mockConfig.openai.modelConfig
);
});
it('should handle semantic text with inference endpoint created from flyout correctly', async () => {
const { result } = renderHook(() =>
useSemanticText({
form: mockForm.elasticModelEndpointCreatedfromFlyout,
setErrorsInTrainedModelDeployment: jest.fn(),
ml: mlMock,
})
);
await act(async () => {
result.current.handleSemanticText(mockField.my_elser_endpoint, mockConfig.elser);
});

expect(mockDispatch).toHaveBeenCalledWith({
type: 'field.add',
value: mockField.my_elser_endpoint,
});
expect(mlMock.mlApi.inferenceModels.createInferenceEndpoint).toHaveBeenCalledWith(
'my_elser_endpoint',
'sparse_embedding',
mockConfig.elser.modelConfig
);
});

it('should handle semantic text correctly', async () => {
const { result } = renderHook(() =>
@@ -252,20 +230,6 @@
type: 'field.add',
value: mockField.elser_model_2,
});
expect(mlMock.mlApi.inferenceModels.createInferenceEndpoint).toHaveBeenCalledWith(
'elser_model_2',
'sparse_embedding',
{
service: 'elser',
service_settings: {
adaptive_allocations: {
enabled: true,
},
num_threads: 1,
model_id: '.elser_model_2',
},
}
);
});
it('does not call create inference endpoint api, if default endpoint already exists', async () => {
const { result } = renderHook(() =>
@@ -19,7 +19,6 @@ import { useMLModelNotificationToasts } from '../../../../../../../../hooks/use_

import { getInferenceEndpoints } from '../../../../../../../services/api';
import { getFieldByPathName } from '../../../../../lib/utils';
import { ELSER_PRECONFIGURED_ENDPOINTS } from '../../../../../constants';

interface UseSemanticTextProps {
form: FormHook<Field, Field>;
@@ -63,9 +62,6 @@ export function useSemanticText(props: UseSemanticTextProps) {
if (!form.getFormData().reference_field) {
form.setFieldValue('reference_field', referenceField);
}
if (!form.getFormData().inference_id) {
form.setFieldValue('inference_id', ELSER_PRECONFIGURED_ENDPOINTS);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [fieldTypeValue]);
@@ -19,11 +19,7 @@ import { i18n } from '@kbn/i18n';

import { NormalizedField, NormalizedFields, State } from '../../../types';
import { getTypeLabelFromField } from '../../../lib';
import {
CHILD_FIELD_INDENT_SIZE,
ELSER_PRECONFIGURED_ENDPOINTS,
LEFT_PADDING_SIZE_FIELD_ITEM_WRAPPER,
} from '../../../constants';
import { CHILD_FIELD_INDENT_SIZE, LEFT_PADDING_SIZE_FIELD_ITEM_WRAPPER } from '../../../constants';

import { FieldsList } from './fields_list';
import { CreateField } from './create_field';
@@ -109,7 +105,6 @@ function FieldListItemComponent(
const indent = treeDepth * CHILD_FIELD_INDENT_SIZE - substractIndentAmount;

const isSemanticText = source.type === 'semantic_text';
const inferenceId: string = (source.inference_id as string) ?? ELSER_PRECONFIGURED_ENDPOINTS;

const indentCreateField =
(treeDepth + 1) * CHILD_FIELD_INDENT_SIZE +
@@ -298,7 +293,7 @@

{isSemanticText && (
<EuiFlexItem grow={false}>
<EuiBadge color="hollow">{inferenceId}</EuiBadge>
<EuiBadge color="hollow">{source.inference_id as string}</EuiBadge>
</EuiFlexItem>
)}

@@ -13,9 +13,3 @@
export const INDEX_DEFAULT = 'index_default';

export const STANDARD = 'standard';

/*
This will be repalce once we add default elser inference_id
with the index mapping response.
*/
export const ELSER_PRECONFIGURED_ENDPOINTS = '.elser-2-elasticsearch';
@@ -1126,22 +1126,10 @@ export const PARAMETERS_DEFINITION: { [key in ParameterName]: ParameterDefinitio
},
inference_id: {
fieldConfig: {
defaultValue: 'elser_model_2',
defaultValue: '',
label: i18n.translate('xpack.idxMgmt.mappingsEditor.parameters.inferenceIdLabel', {
defaultMessage: 'Select an inference endpoint:',
}),
validations: [
{
validator: emptyField(
i18n.translate(
'xpack.idxMgmt.mappingsEditor.parameters.validations.inferenceIdIsRequiredErrorMessage',
{
defaultMessage: 'Inference ID is required.',
}
)
),
},
],
},
schema: t.string,
},
1 change: 0 additions & 1 deletion x-pack/plugins/translations/translations/fr-FR.json
@@ -23310,7 +23310,6 @@
"xpack.idxMgmt.mappingsEditor.parameters.validations.fieldDataFrequency.numberGreaterThanOneErrorMessage": "La valeur doit être supérieure à un.",
"xpack.idxMgmt.mappingsEditor.parameters.validations.greaterThanZeroErrorMessage": "Le facteur de montée en charge doit être supérieur à 0.",
"xpack.idxMgmt.mappingsEditor.parameters.validations.ignoreAboveIsRequiredErrorMessage": "Limite de longueur de caractère obligatoire.",
"xpack.idxMgmt.mappingsEditor.parameters.validations.inferenceIdIsRequiredErrorMessage": "L’ID d’inférence est requis.",
"xpack.idxMgmt.mappingsEditor.parameters.validations.localeFieldRequiredErrorMessage": "Spécifiez un paramètre régional.",
"xpack.idxMgmt.mappingsEditor.parameters.validations.maxInputLengthFieldRequiredErrorMessage": "Spécifiez une longueur d'entrée maximale.",
"xpack.idxMgmt.mappingsEditor.parameters.validations.nameIsRequiredErrorMessage": "Donnez un nom au champ.",
1 change: 0 additions & 1 deletion x-pack/plugins/translations/translations/ja-JP.json
@@ -23282,7 +23282,6 @@
"xpack.idxMgmt.mappingsEditor.parameters.validations.fieldDataFrequency.numberGreaterThanOneErrorMessage": "値は1よりも大きい値でなければなりません。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.greaterThanZeroErrorMessage": "スケーリングファクターは0よりも大きくなくてはなりません。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.ignoreAboveIsRequiredErrorMessage": "文字数制限が必要です。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.inferenceIdIsRequiredErrorMessage": "推論IDは必須です。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.localeFieldRequiredErrorMessage": "ロケールを指定します。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.maxInputLengthFieldRequiredErrorMessage": "最大入力長さを指定します。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.nameIsRequiredErrorMessage": "フィールドに名前を付けます。",
1 change: 0 additions & 1 deletion x-pack/plugins/translations/translations/zh-CN.json
@@ -22886,7 +22886,6 @@
"xpack.idxMgmt.mappingsEditor.parameters.validations.fieldDataFrequency.numberGreaterThanOneErrorMessage": "值必须大于 1。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.greaterThanZeroErrorMessage": "缩放因数必须大于 0。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.ignoreAboveIsRequiredErrorMessage": "字符长度限制必填。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.inferenceIdIsRequiredErrorMessage": "'推理 ID'必填。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.localeFieldRequiredErrorMessage": "指定区域设置。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.maxInputLengthFieldRequiredErrorMessage": "指定最大输入长度。",
"xpack.idxMgmt.mappingsEditor.parameters.validations.nameIsRequiredErrorMessage": "为字段提供名称。",