[8.x] [ML] Trained Models: Optimize trained models Kibana API (#200977) (#203087)

# Backport

This will backport the following commits from `main` to `8.x`:
- [[ML] Trained Models: Optimize trained models Kibana API (#200977)](#200977)

<!--- Backport version: 8.9.8 -->

### Questions?
Please refer to the [Backport tool documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Dima
Arnautov","email":"[email protected]"},"sourceCommit":{"committedDate":"2024-12-04T18:50:18Z","message":"[ML]
Trained Models: Optimize trained models Kibana API (#200977)\n\n##
Summary\r\n\r\nCloses #191939 \r\nCloses
https://github.com/elastic/kibana/issues/175220\r\n\r\nAdds various
optimizations for the Trained Models page:\r\n\r\n---\r\n\r\n- Creates a
new Kibana `/trained_models_list` endpoint responsible for\r\nfetching
complete data for the Trained Model UI page, including\r\npipelines,
indices and stats.\r\n\r\nBefore the Trained Models page required 3
endpoints. The new\r\n`trained_models_list` replaces them, reducing the
overall latency.\r\n\r\n<img width=\"715\" alt=\"Screenshot 2024-12-02
at 16 18
32\"\r\nsrc=\"https://github.com/user-attachments/assets/34bebbdc-ae80-4e08-8512-199c57cb5b54\">\r\n\r\n\r\n---\r\n\r\n-
Optimized fetching of pipelines, indices and stats, reducing
the\r\nnumber of API calls to ES\r\n\r\nSeveral issues with the old
endpoint stemmed from the with_indices flag.\r\nThis flag triggered a
method designed for the Model Map feature, which\r\ninvolved fetching a
complete list of pipelines, iterating over each\r\nmodel, retrieving
index settings multiple times, and obtaining both\r\nindex content and a
full list of transforms.\r\n\r\nThe new endpoint solves these issues by
fetching only the necessary\r\ninformation for the Trained Model page
with minimal calls to\r\nElasticsearch.\r\n\r\n#### APM transaction with
a new endpoint \r\n<img width=\"1822\"
alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/55e4a5f0-e571-46a2-b7ad-5b5a6fc44ceb\">\r\n\r\n####
APM transaction with an old
endpoint\r\n\r\n\r\nhttps://github.com/user-attachments/assets/c9d62ddb-5e13-4ac1-9cbf-d685fbed7808\r\n\r\n---\r\n\r\n-
Improves type definitions for different model types\r\n\r\n###
Checklist\r\n\r\n- [x] [Unit or
functional\r\ntests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)\r\nwere
updated or added to match the most common
scenarios","sha":"e067fa239de670123c4f7d6aaba3d6001796babe","branchLabelMapping":{"^v9.0.0$":"main","^v8.18.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:enhancement",":ml","Feature:Data
Frame Analytics","v9.0.0","Feature:3rd Party
Models","Team:ML","backport:version","v8.17.0","v8.18.0"],"number":200977,"url":"https://github.com/elastic/kibana/pull/200977","mergeCommit":{"message":"[ML]
Trained Models: Optimize trained models Kibana API (#200977)\n\n##
Summary\r\n\r\nCloses #191939 \r\nCloses
https://github.com/elastic/kibana/issues/175220\r\n\r\nAdds various
optimizations for the Trained Models page:\r\n\r\n---\r\n\r\n- Creates a
new Kibana `/trained_models_list` endpoint responsible for\r\nfetching
complete data for the Trained Model UI page, including\r\npipelines,
indices and stats.\r\n\r\nBefore the Trained Models page required 3
endpoints. The new\r\n`trained_models_list` replaces them, reducing the
overall latency.\r\n\r\n<img width=\"715\" alt=\"Screenshot 2024-12-02
at 16 18
32\"\r\nsrc=\"https://github.com/user-attachments/assets/34bebbdc-ae80-4e08-8512-199c57cb5b54\">\r\n\r\n\r\n---\r\n\r\n-
Optimized fetching of pipelines, indices and stats, reducing
the\r\nnumber of API calls to ES\r\n\r\nSeveral issues with the old
endpoint stemmed from the with_indices flag.\r\nThis flag triggered a
method designed for the Model Map feature, which\r\ninvolved fetching a
complete list of pipelines, iterating over each\r\nmodel, retrieving
index settings multiple times, and obtaining both\r\nindex content and a
full list of transforms.\r\n\r\nThe new endpoint solves these issues by
fetching only the necessary\r\ninformation for the Trained Model page
with minimal calls to\r\nElasticsearch.\r\n\r\n#### APM transaction with
a new endpoint \r\n<img width=\"1822\"
alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/55e4a5f0-e571-46a2-b7ad-5b5a6fc44ceb\">\r\n\r\n####
APM transaction with an old
endpoint\r\n\r\n\r\nhttps://github.com/user-attachments/assets/c9d62ddb-5e13-4ac1-9cbf-d685fbed7808\r\n\r\n---\r\n\r\n-
Improves type definitions for different model types\r\n\r\n###
Checklist\r\n\r\n- [x] [Unit or
functional\r\ntests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)\r\nwere
updated or added to match the most common
scenarios","sha":"e067fa239de670123c4f7d6aaba3d6001796babe"}},"sourceBranch":"main","suggestedTargetBranches":["8.17","8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.0.0","labelRegex":"^v9.0.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/200977","number":200977,"mergeCommit":{"message":"[ML]
Trained Models: Optimize trained models Kibana API (#200977)\n\n##
Summary\r\n\r\nCloses #191939 \r\nCloses
https://github.com/elastic/kibana/issues/175220\r\n\r\nAdds various
optimizations for the Trained Models page:\r\n\r\n---\r\n\r\n- Creates a
new Kibana `/trained_models_list` endpoint responsible for\r\nfetching
complete data for the Trained Model UI page, including\r\npipelines,
indices and stats.\r\n\r\nBefore the Trained Models page required 3
endpoints. The new\r\n`trained_models_list` replaces them, reducing the
overall latency.\r\n\r\n<img width=\"715\" alt=\"Screenshot 2024-12-02
at 16 18
32\"\r\nsrc=\"https://github.com/user-attachments/assets/34bebbdc-ae80-4e08-8512-199c57cb5b54\">\r\n\r\n\r\n---\r\n\r\n-
Optimized fetching of pipelines, indices and stats, reducing
the\r\nnumber of API calls to ES\r\n\r\nSeveral issues with the old
endpoint stemmed from the with_indices flag.\r\nThis flag triggered a
method designed for the Model Map feature, which\r\ninvolved fetching a
complete list of pipelines, iterating over each\r\nmodel, retrieving
index settings multiple times, and obtaining both\r\nindex content and a
full list of transforms.\r\n\r\nThe new endpoint solves these issues by
fetching only the necessary\r\ninformation for the Trained Model page
with minimal calls to\r\nElasticsearch.\r\n\r\n#### APM transaction with
a new endpoint \r\n<img width=\"1822\"
alt=\"image\"\r\nsrc=\"https://github.com/user-attachments/assets/55e4a5f0-e571-46a2-b7ad-5b5a6fc44ceb\">\r\n\r\n####
APM transaction with an old
endpoint\r\n\r\n\r\nhttps://github.com/user-attachments/assets/c9d62ddb-5e13-4ac1-9cbf-d685fbed7808\r\n\r\n---\r\n\r\n-
Improves type definitions for different model types\r\n\r\n###
Checklist\r\n\r\n- [x] [Unit or
functional\r\ntests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)\r\nwere
updated or added to match the most common
scenarios","sha":"e067fa239de670123c4f7d6aaba3d6001796babe"}},{"branch":"8.17","label":"v8.17.0","labelRegex":"^v(\\d+).(\\d+).\\d+$","isSourceBranch":false,"state":"NOT_CREATED"},{"branch":"8.x","label":"v8.18.0","labelRegex":"^v8.18.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->
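The following is a rough, illustrative sketch of the "fetch once, join in memory" approach described above, written against the Elasticsearch JavaScript client. The function name, the `size` limits, and the joining logic are assumptions for illustration; this is not the actual Kibana `trained_models_list` implementation.

```ts
import { Client } from '@elastic/elasticsearch';

// Illustrative sketch only: bulk-fetch models, stats and pipelines once,
// then assemble the per-model view in memory.
export async function fetchTrainedModelsList(client: Client) {
  // Three bulk requests instead of per-model round trips.
  const [modelsResponse, statsResponse, pipelines] = await Promise.all([
    client.ml.getTrainedModels({ size: 1000 }),
    client.ml.getTrainedModelsStats({ size: 1000 }),
    client.ingest.getPipeline(),
  ]);

  // Group pipeline ids by the model id referenced in their inference processors.
  const pipelinesByModelId = new Map<string, string[]>();
  for (const [pipelineId, pipeline] of Object.entries(pipelines)) {
    for (const processor of pipeline.processors ?? []) {
      const modelId = processor.inference?.model_id;
      if (modelId) {
        pipelinesByModelId.set(modelId, [...(pipelinesByModelId.get(modelId) ?? []), pipelineId]);
      }
    }
  }

  const statsByModelId = new Map(
    statsResponse.trained_model_stats.map((s) => [s.model_id, s] as const)
  );

  // One pass to combine everything the UI needs per model.
  return modelsResponse.trained_model_configs.map((model) => ({
    ...model,
    stats: statsByModelId.get(model.model_id),
    pipelines: pipelinesByModelId.get(model.model_id) ?? [],
  }));
}
```

The point of this shape is that three bulk requests replace the per-model loops (pipeline listing and repeated index-settings lookups) that the old `with_indices` path performed.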
darnautov authored Dec 5, 2024
1 parent e48a7e8 commit 1f61f5b
Showing 45 changed files with 1,239 additions and 1,051 deletions.
164 changes: 138 additions & 26 deletions x-pack/plugins/ml/common/types/trained_models.ts
@@ -5,14 +5,25 @@
* 2.0.
*/
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { TrainedModelType } from '@kbn/ml-trained-models-utils';
import type {
InferenceInferenceEndpointInfo,
MlInferenceConfigCreateContainer,
} from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type {
ModelDefinitionResponse,
ModelState,
TrainedModelType,
} from '@kbn/ml-trained-models-utils';
import {
BUILT_IN_MODEL_TAG,
ELASTIC_MODEL_TAG,
TRAINED_MODEL_TYPE,
} from '@kbn/ml-trained-models-utils';
import type {
DataFrameAnalyticsConfig,
FeatureImportanceBaseline,
TotalFeatureImportance,
} from '@kbn/ml-data-frame-analytics-utils';
import type { IndexName, IndicesIndexState } from '@elastic/elasticsearch/lib/api/types';
import type { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import type { XOR } from './common';
import type { MlSavedObjectType } from './saved_objects';

@@ -95,33 +106,12 @@ export type PutTrainedModelConfig = {
>; // compressed_definition and definition are mutually exclusive

export type TrainedModelConfigResponse = estypes.MlTrainedModelConfig & {
/**
* Associated pipelines. Extends response from the ES endpoint.
*/
pipelines?: Record<string, PipelineDefinition> | null;
origin_job_exists?: boolean;

metadata?: {
analytics_config: DataFrameAnalyticsConfig;
metadata?: estypes.MlTrainedModelConfig['metadata'] & {
analytics_config?: DataFrameAnalyticsConfig;
input: unknown;
total_feature_importance?: TotalFeatureImportance[];
feature_importance_baseline?: FeatureImportanceBaseline;
model_aliases?: string[];
} & Record<string, unknown>;
model_id: string;
model_type: TrainedModelType;
tags: string[];
version: string;
inference_config?: Record<string, any>;
indices?: Array<Record<IndexName, IndicesIndexState | null>>;
/**
* Whether the model has inference services
*/
hasInferenceServices?: boolean;
/**
* Inference services associated with the model
*/
inference_apis?: InferenceAPIConfigResponse[];
};

export interface PipelineDefinition {
@@ -309,3 +299,125 @@ export interface ModelDownloadState {
total_parts: number;
downloaded_parts: number;
}

export type Stats = Omit<TrainedModelStat, 'model_id' | 'deployment_stats'>;

/**
* Additional properties for all items in the Trained models table
* */
interface BaseModelItem {
type?: string[];
tags: string[];
/**
* Whether the model has inference services
*/
hasInferenceServices?: boolean;
/**
* Inference services associated with the model
*/
inference_apis?: InferenceInferenceEndpointInfo[];
/**
* Associated pipelines. Extends response from the ES endpoint.
*/
pipelines?: Record<string, PipelineDefinition>;
/**
* Indices with associated pipelines that have inference processors utilizing the model deployments.
*/
indices?: string[];
}

/** Common properties for existing NLP models and NLP model download configs */
interface BaseNLPModelItem extends BaseModelItem {
disclaimer?: string;
recommended?: boolean;
supported?: boolean;
state: ModelState | undefined;
downloadState?: ModelDownloadState;
}

/** Model available for download */
export type ModelDownloadItem = BaseNLPModelItem &
Omit<ModelDefinitionResponse, 'version' | 'config'> & {
putModelConfig?: object;
softwareLicense?: string;
};
/** Trained NLP model, i.e. pytorch model returned by the trained_models API */
export type NLPModelItem = BaseNLPModelItem &
TrainedModelItem & {
stats: Stats & { deployment_stats: TrainedModelDeploymentStatsResponse[] };
/**
* Description of the current model state
*/
stateDescription?: string;
/**
* Deployment ids extracted from the deployment stats
*/
deployment_ids: string[];
};

export function isBaseNLPModelItem(item: unknown): item is BaseNLPModelItem {
return (
typeof item === 'object' &&
item !== null &&
'type' in item &&
Array.isArray(item.type) &&
item.type.includes(TRAINED_MODEL_TYPE.PYTORCH)
);
}

export function isNLPModelItem(item: unknown): item is NLPModelItem {
return isExistingModel(item) && item.model_type === TRAINED_MODEL_TYPE.PYTORCH;
}

export const isElasticModel = (item: TrainedModelConfigResponse) =>
item.tags.includes(ELASTIC_MODEL_TAG);

export type ExistingModelBase = TrainedModelConfigResponse & BaseModelItem;

/** Any model returned by the trained_models API, e.g. lang_ident, elser, dfa model */
export type TrainedModelItem = ExistingModelBase & { stats: Stats };

/** Trained DFA model */
export type DFAModelItem = Omit<TrainedModelItem, 'inference_config'> & {
origin_job_exists?: boolean;
inference_config?: Pick<MlInferenceConfigCreateContainer, 'classification' | 'regression'>;
metadata?: estypes.MlTrainedModelConfig['metadata'] & {
analytics_config: DataFrameAnalyticsConfig;
input: unknown;
total_feature_importance?: TotalFeatureImportance[];
feature_importance_baseline?: FeatureImportanceBaseline;
} & Record<string, unknown>;
};

export type TrainedModelWithPipelines = TrainedModelItem & {
pipelines: Record<string, PipelineDefinition>;
};

export function isExistingModel(item: unknown): item is TrainedModelItem {
return (
typeof item === 'object' &&
item !== null &&
'model_type' in item &&
'create_time' in item &&
!!item.create_time
);
}

export function isDFAModelItem(item: unknown): item is DFAModelItem {
return isExistingModel(item) && item.model_type === TRAINED_MODEL_TYPE.TREE_ENSEMBLE;
}

export function isModelDownloadItem(item: TrainedModelUIItem): item is ModelDownloadItem {
return 'putModelConfig' in item && !!item.type?.includes(TRAINED_MODEL_TYPE.PYTORCH);
}

export const isBuiltInModel = (item: TrainedModelConfigResponse | TrainedModelUIItem) =>
item.tags.includes(BUILT_IN_MODEL_TAG);
/**
* This type represents a union of different model entities:
* - Any existing trained model returned by the API, e.g., lang_ident_model_1, DFA models, etc.
* - Hosted model configurations available for download, e.g., ELSER or E5
* - NLP models already downloaded into Elasticsearch
* - DFA models
*/
export type TrainedModelUIItem = TrainedModelItem | ModelDownloadItem | NLPModelItem | DFAModelItem;
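For orientation, here is a minimal usage sketch of the union and type guards defined above. The `describeItem` helper and the import path are hypothetical and not part of this change; only the types and guards it calls come from the diff.

```ts
import {
  isDFAModelItem,
  isModelDownloadItem,
  isNLPModelItem,
  type TrainedModelUIItem,
} from '../../../common/types/trained_models';

// Hypothetical helper: the guards narrow TrainedModelUIItem so each branch
// only sees the properties specific to that variant.
function describeItem(item: TrainedModelUIItem): string {
  if (isModelDownloadItem(item)) {
    // Model config available for download; not installed yet.
    return `available for download (state: ${item.state ?? 'not downloaded'})`;
  }
  if (isNLPModelItem(item)) {
    // Existing pytorch model: deployment ids and deployment stats are present.
    return `${item.model_id}: ${item.deployment_ids.length} deployment(s)`;
  }
  if (isDFAModelItem(item)) {
    // Existing DFA model: the analytics config lives under metadata.
    return `${item.model_id}: data frame analytics model`;
  }
  // Any other existing model returned by the trained_models API, e.g. lang_ident_model_1.
  return item.model_id;
}
```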
(next changed file)
@@ -20,7 +20,7 @@ import {
import { i18n } from '@kbn/i18n';
import { extractErrorProperties } from '@kbn/ml-error-utils';

import type { ModelItem } from '../../model_management/models_list';
import type { DFAModelItem } from '../../../../common/types/trained_models';
import type { AddInferencePipelineSteps } from './types';
import { ADD_INFERENCE_PIPELINE_STEPS } from './constants';
import { AddInferencePipelineFooter } from '../shared';
@@ -39,7 +39,7 @@ import { useFetchPipelines } from './hooks/use_fetch_pipelines';

export interface AddInferencePipelineFlyoutProps {
onClose: () => void;
model: ModelItem;
model: DFAModelItem;
}

export const AddInferencePipelineFlyout: FC<AddInferencePipelineFlyoutProps> = ({
(next changed file)
@@ -25,7 +25,7 @@ import {
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import { CodeEditor } from '@kbn/code-editor';
import type { ModelItem } from '../../../model_management/models_list';
import type { DFAModelItem } from '../../../../../common/types/trained_models';
import {
EDIT_MESSAGE,
CANCEL_EDIT_MESSAGE,
@@ -56,9 +56,9 @@ interface Props {
condition?: string;
fieldMap: MlInferenceState['fieldMap'];
handleAdvancedConfigUpdate: (configUpdate: Partial<MlInferenceState>) => void;
inferenceConfig: ModelItem['inference_config'];
modelInferenceConfig: ModelItem['inference_config'];
modelInputFields: ModelItem['input'];
inferenceConfig: DFAModelItem['inference_config'];
modelInferenceConfig: DFAModelItem['inference_config'];
modelInputFields: DFAModelItem['input'];
modelType?: InferenceModelTypes;
setHasUnsavedChanges: React.Dispatch<React.SetStateAction<boolean>>;
tag?: string;
(next changed file)
@@ -6,10 +6,10 @@
*/

import { getAnalysisType } from '@kbn/ml-data-frame-analytics-utils';
import type { DFAModelItem } from '../../../../common/types/trained_models';
import type { MlInferenceState } from './types';
import type { ModelItem } from '../../model_management/models_list';

export const getModelType = (model: ModelItem): string | undefined => {
export const getModelType = (model: DFAModelItem): string | undefined => {
const analysisConfig = model.metadata?.analytics_config?.analysis;
return analysisConfig !== undefined ? getAnalysisType(analysisConfig) : undefined;
};
@@ -54,13 +54,17 @@ export const getDefaultOnFailureConfiguration = (): MlInferenceState['onFailure'
},
];

export const getInitialState = (model: ModelItem): MlInferenceState => {
export const getInitialState = (model: DFAModelItem): MlInferenceState => {
const modelType = getModelType(model);
let targetField;

if (modelType !== undefined) {
targetField = model.inference_config
? `ml.inference.${model.inference_config[modelType].results_field}`
? `ml.inference.${
model.inference_config[
modelType as keyof Exclude<DFAModelItem['inference_config'], undefined>
]!.results_field
}`
: undefined;
}
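A small, made-up example of how that keyed lookup resolves for a regression DFA model; the inline model object and the `modelType` value are purely illustrative.

```ts
import type { DFAModelItem } from '../../../../common/types/trained_models';

// Purely illustrative: a regression DFA model whose results_field is 'prediction'.
const model = {
  inference_config: { regression: { results_field: 'prediction' } },
} as unknown as DFAModelItem;

const modelType = 'regression' as const;

// Mirrors the lookup above: the detected model type keys into inference_config.
const targetField = model.inference_config
  ? `ml.inference.${model.inference_config[modelType]!.results_field}`
  : undefined;
// targetField === 'ml.inference.prediction'
```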

(next changed file)
@@ -154,6 +154,7 @@ export function AnalyticsIdSelector({
async function fetchAnalyticsModels() {
setIsLoading(true);
try {
// FIXME should if fetch all trained models?
const response = await trainedModelsApiService.getTrainedModels();
setTrainedModels(response);
} catch (e) {
(next changed file)
@@ -30,12 +30,12 @@ import { FormattedMessage } from '@kbn/i18n-react';
import React, { type FC, useMemo, useState } from 'react';
import { groupBy } from 'lodash';
import { ElandPythonClient } from '@kbn/inference_integration_flyout';
import type { ModelDownloadItem } from '../../../common/types/trained_models';
import { usePermissionCheck } from '../capabilities/check_capabilities';
import { useMlKibana } from '../contexts/kibana';
import type { ModelItem } from './models_list';

export interface AddModelFlyoutProps {
modelDownloads: ModelItem[];
modelDownloads: ModelDownloadItem[];
onClose: () => void;
onSubmit: (modelId: string) => void;
}
@@ -138,7 +138,7 @@ export const AddModelFlyout: FC<AddModelFlyoutProps> = ({ onClose, onSubmit, mod
};

interface ClickToDownloadTabContentProps {
modelDownloads: ModelItem[];
modelDownloads: ModelDownloadItem[];
onModelDownload: (modelId: string) => void;
}

(next changed file)
@@ -21,7 +21,7 @@ import { i18n } from '@kbn/i18n';
import { extractErrorProperties } from '@kbn/ml-error-utils';
import type { SupportedPytorchTasksType } from '@kbn/ml-trained-models-utils';

import type { ModelItem } from '../models_list';
import type { TrainedModelItem } from '../../../../common/types/trained_models';
import type { AddInferencePipelineSteps } from '../../components/ml_inference/types';
import { ADD_INFERENCE_PIPELINE_STEPS } from '../../components/ml_inference/constants';
import { AddInferencePipelineFooter } from '../../components/shared';
@@ -40,7 +40,7 @@ import { useTestTrainedModelsContext } from '../test_models/test_trained_models_

export interface CreatePipelineForModelFlyoutProps {
onClose: (refreshList?: boolean) => void;
model: ModelItem;
model: TrainedModelItem;
}

export const CreatePipelineForModelFlyout: FC<CreatePipelineForModelFlyoutProps> = ({
(next changed file)
@@ -7,8 +7,8 @@

import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { IngestInferenceProcessor } from '@elastic/elasticsearch/lib/api/types';
import type { TrainedModelItem } from '../../../../common/types/trained_models';
import { getDefaultOnFailureConfiguration } from '../../components/ml_inference/state';
import type { ModelItem } from '../models_list';

export interface InferecePipelineCreationState {
creatingPipeline: boolean;
@@ -26,7 +26,7 @@ export interface InferecePipelineCreationState {
}

export const getInitialState = (
model: ModelItem,
model: TrainedModelItem,
initialPipelineConfig: estypes.IngestPipeline | undefined
): InferecePipelineCreationState => ({
creatingPipeline: false,
(next changed file)
@@ -12,13 +12,13 @@ import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';

import type { ModelItem } from '../models_list';
import type { TrainedModelItem } from '../../../../common/types/trained_models';
import { TestTrainedModelContent } from '../test_models/test_trained_model_content';
import { useMlKibana } from '../../contexts/kibana';
import { type InferecePipelineCreationState } from './state';

interface ContentProps {
model: ModelItem;
model: TrainedModelItem;
handlePipelineConfigUpdate: (configUpdate: Partial<InferecePipelineCreationState>) => void;
externalPipelineConfig?: estypes.IngestPipeline;
}
(next changed file)
@@ -22,14 +22,15 @@ import {
EuiSpacer,
} from '@elastic/eui';
import { isPopulatedObject } from '@kbn/ml-is-populated-object';
import type { TrainedModelItem, TrainedModelUIItem } from '../../../common/types/trained_models';
import { isExistingModel } from '../../../common/types/trained_models';
import { type WithRequired } from '../../../common/types/common';
import { useTrainedModelsApiService } from '../services/ml_api_service/trained_models';
import { useToastNotificationService } from '../services/toast_notification_service';
import { DeleteSpaceAwareItemCheckModal } from '../components/delete_space_aware_item_check_modal';
import { type ModelItem } from './models_list';

interface DeleteModelsModalProps {
models: ModelItem[];
models: TrainedModelUIItem[];
onClose: (refreshList?: boolean) => void;
}

@@ -42,11 +43,14 @@ export const DeleteModelsModal: FC<DeleteModelsModalProps> = ({ models, onClose

const modelIds = models.map((m) => m.model_id);

const modelsWithPipelines = models.filter((m) => isPopulatedObject(m.pipelines)) as Array<
WithRequired<ModelItem, 'pipelines'>
>;
const modelsWithPipelines = models.filter(
(m): m is WithRequired<TrainedModelItem, 'pipelines'> =>
isExistingModel(m) && isPopulatedObject(m.pipelines)
);

const modelsWithInferenceAPIs = models.filter((m) => m.hasInferenceServices);
const modelsWithInferenceAPIs = models.filter(
(m): m is TrainedModelItem => isExistingModel(m) && !!m.hasInferenceServices
);

const inferenceAPIsIDs: string[] = modelsWithInferenceAPIs.flatMap((model) => {
return (model.inference_apis ?? []).map((inference) => inference.inference_id);