[Enterprise Search] fix ml inference with api index
Updates the Pipelines logic to ensure you can configure ML inference pipelines with API-based indices.
TattdCodeMonkey committed Oct 4, 2022
1 parent 7f3541c commit d0a6899
Showing 4 changed files with 55 additions and 19 deletions.
@@ -85,7 +85,7 @@ export const IngestPipelinesCard: React.FC = () => {
<EuiFlexGroup alignItems="center">
<EuiFlexItem>
<EuiTitle size="xs">
<h4>{pipelineState.name}</h4>
<h4>{pipelineName}</h4>
</EuiTitle>
</EuiFlexItem>
<EuiFlexItem grow={false}>
@@ -33,12 +33,8 @@ import { PipelinesJSONConfigurations } from './pipelines_json_configurations';
import { PipelinesLogic } from './pipelines_logic';

export const SearchIndexPipelines: React.FC = () => {
const {
showAddMlInferencePipelineModal,
hasIndexIngestionPipeline,
index,
pipelineState: { name: pipelineName },
} = useValues(PipelinesLogic);
const { showAddMlInferencePipelineModal, hasIndexIngestionPipeline, index, pipelineName } =
useValues(PipelinesLogic);
const { closeAddMlInferencePipelineModal, openAddMlInferencePipelineModal } =
useActions(PipelinesLogic);
const apiIndex = isApiIndex(index);
@@ -133,7 +129,7 @@ export const SearchIndexPipelines: React.FC = () => {
'xpack.enterpriseSearch.content.indices.pipelines.mlInferencePipelines.subtitleAPIindex',
{
defaultMessage:
"Inference pipelines will be run as processors from the Enterprise Search Ingest Pipeline. In order to use these pipeline on API-based indices you'll need to reference the {pipelineName} pipeline in your API requests.",
"Inference pipelines will be run as processors from the Enterprise Search Ingest Pipeline. In order to use these pipelines on API-based indices you'll need to reference the {pipelineName} pipeline in your API requests.",
values: {
pipelineName,
},
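For context on the subtitle string above: it tells users of API-based indices to reference the named ingest pipeline in their own indexing requests so its processors (including any attached ML inference processors) run on incoming documents. A minimal sketch of that with the Elasticsearch JavaScript client follows; the index name, pipeline name, and document are illustrative assumptions, not part of this commit.

import { Client } from '@elastic/elasticsearch';

const client = new Client({ node: 'http://localhost:9200' });

// Send a document through the named ingest pipeline explicitly.
// 'my-api-index' is a hypothetical API-based index whose ingest pipeline
// shares its name, matching the {pipelineName} shown in the UI message.
async function indexThroughPipeline() {
  await client.index({
    index: 'my-api-index',
    pipeline: 'my-api-index',
    document: { title: 'Hello world' },
  });
}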
@@ -4,11 +4,13 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { LogicMounter, mockFlashMessageHelpers } from '../../../../__mocks__/kea_logic';
import { connectorIndex } from '../../../__mocks__/view_index.mock';
import { apiIndex, connectorIndex } from '../../../__mocks__/view_index.mock';

import { IngestPipeline } from '@elastic/elasticsearch/lib/api/types';

import { UpdatePipelineApiLogic } from '../../../api/connector/update_pipeline_api_logic';
import { FetchCustomPipelineApiLogic } from '../../../api/index/fetch_custom_pipeline_api_logic';
import { FetchIndexApiLogic } from '../../../api/index/fetch_index_api_logic';

import { PipelinesLogic } from './pipelines_logic';
@@ -40,6 +42,7 @@ describe('PipelinesLogic', () => {
const { mount } = new LogicMounter(PipelinesLogic);
const { mount: mountFetchIndexApiLogic } = new LogicMounter(FetchIndexApiLogic);
const { mount: mountUpdatePipelineLogic } = new LogicMounter(UpdatePipelineApiLogic);
const { mount: mountFetchCustomPipelineApiLogic } = new LogicMounter(FetchCustomPipelineApiLogic);
const { clearFlashMessages, flashAPIErrors, flashSuccessToast } = mockFlashMessageHelpers;

const newPipeline = {
@@ -51,6 +54,7 @@
beforeEach(() => {
jest.clearAllMocks();
mountFetchIndexApiLogic();
mountFetchCustomPipelineApiLogic();
mountUpdatePipelineLogic();
mount();
});
@@ -195,5 +199,41 @@
});
});
});
describe('fetchCustomPipelineSuccess', () => {
it('should support api indices with custom ingest pipelines', () => {
PipelinesLogic.actions.fetchIndexApiSuccess({
...apiIndex,
});
const indexName = apiIndex.name;
const indexPipelines: Record<string, IngestPipeline> = {
[indexName]: {
processors: [],
version: 1,
},
[`${indexName}@custom`]: {
processors: [],
version: 1,
},
[`${indexName}@ml-inference`]: {
processors: [],
version: 1,
},
};
PipelinesLogic.actions.fetchCustomPipelineSuccess(indexPipelines);

expect(PipelinesLogic.values).toEqual({
...DEFAULT_VALUES,
customPipelineData: indexPipelines,
index: {
...apiIndex,
},
indexName,
pipelineName: indexName,
canSetPipeline: false,
hasIndexIngestionPipeline: true,
canUseMlInferencePipeline: true,
});
});
});
});
});
@@ -90,6 +90,10 @@ type PipelinesActions = Pick<
FetchCustomPipelineApiLogicArgs,
FetchCustomPipelineApiLogicResponse
>['makeRequest'];
fetchCustomPipelineSuccess: Actions<
FetchCustomPipelineApiLogicArgs,
FetchCustomPipelineApiLogicResponse
>['apiSuccess'];
fetchDefaultPipeline: Actions<undefined, FetchDefaultPipelineResponse>['makeRequest'];
fetchDefaultPipelineSuccess: Actions<undefined, FetchDefaultPipelineResponse>['apiSuccess'];
fetchIndexApiSuccess: Actions<FetchIndexApiParams, FetchIndexApiResponse>['apiSuccess'];
@@ -143,7 +147,7 @@ export const PipelinesLogic = kea<MakeLogicType<PipelinesValues, PipelinesAction
FetchDefaultPipelineApiLogic,
['apiSuccess as fetchDefaultPipelineSuccess', 'makeRequest as fetchDefaultPipeline'],
FetchCustomPipelineApiLogic,
['makeRequest as fetchCustomPipeline'],
['apiSuccess as fetchCustomPipelineSuccess', 'makeRequest as fetchCustomPipeline'],
FetchMlInferencePipelineProcessorsApiLogic,
[
'makeRequest as fetchMlInferenceProcessors',
@@ -303,16 +307,12 @@
(index: ElasticsearchIndexWithIngestion) => !isApiIndex(index),
],
canUseMlInferencePipeline: [
() => [
selectors.canSetPipeline,
selectors.hasIndexIngestionPipeline,
selectors.pipelineState,
],
() => [selectors.hasIndexIngestionPipeline, selectors.pipelineState, selectors.index],
(
canSetPipeline: boolean,
hasIndexIngestionPipeline: boolean,
pipelineState: IngestPipelineParams
) => canSetPipeline && hasIndexIngestionPipeline && pipelineState.run_ml_inference,
pipelineState: IngestPipelineParams,
index: ElasticsearchIndexWithIngestion
) => hasIndexIngestionPipeline && (pipelineState.run_ml_inference || isApiIndex(index)),
],
defaultPipelineValues: [
() => [selectors.defaultPipelineValuesData],
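The behavioral core of the hunk above is the relaxed canUseMlInferencePipeline condition: an index now qualifies when it has its own ingestion pipeline and either run_ml_inference is enabled or the index is API-based, with no dependency on canSetPipeline. A simplified stand-in for the selector body, written as a plain function outside of Kea, might look like the sketch below; the interface and argument names are illustrative, not the project's.

interface PipelineParamsSketch {
  run_ml_inference: boolean;
}

// Plain-function approximation of the selector logic; `isApiBasedIndex`
// stands in for the real isApiIndex(index) check.
const canUseMlInference = (
  hasIndexIngestionPipeline: boolean,
  pipelineState: PipelineParamsSketch,
  isApiBasedIndex: boolean
): boolean =>
  hasIndexIngestionPipeline && (pipelineState.run_ml_inference || isApiBasedIndex);

// An API-based index with its own ingestion pipeline now qualifies even
// when run_ml_inference is disabled:
// canUseMlInference(true, { run_ml_inference: false }, true) === true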
