diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/validation/datasets.ts b/x-pack/plugins/infra/common/http_api/log_analysis/validation/datasets.ts
new file mode 100644
index 0000000000000..c9f98ac5fcdea
--- /dev/null
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/validation/datasets.ts
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+
+export const LOG_ANALYSIS_VALIDATE_DATASETS_PATH =
+  '/api/infra/log_analysis/validation/log_entry_datasets';
+
+/**
+ * Request types
+ */
+export const validateLogEntryDatasetsRequestPayloadRT = rt.type({
+  data: rt.type({
+    indices: rt.array(rt.string),
+    timestampField: rt.string,
+    startTime: rt.number,
+    endTime: rt.number,
+  }),
+});
+
+export type ValidateLogEntryDatasetsRequestPayload = rt.TypeOf<
+  typeof validateLogEntryDatasetsRequestPayloadRT
+>;
+
+/**
+ * Response types
+ * */
+const logEntryDatasetsEntryRT = rt.strict({
+  indexName: rt.string,
+  datasets: rt.array(rt.string),
+});
+
+export const validateLogEntryDatasetsResponsePayloadRT = rt.type({
+  data: rt.type({
+    datasets: rt.array(logEntryDatasetsEntryRT),
+  }),
+});
+
+export type ValidateLogEntryDatasetsResponsePayload = rt.TypeOf<
+  typeof validateLogEntryDatasetsResponsePayloadRT
+>;
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/validation/index.ts b/x-pack/plugins/infra/common/http_api/log_analysis/validation/index.ts
index f23ef7ee7c302..5f02f5598e6a4 100644
--- a/x-pack/plugins/infra/common/http_api/log_analysis/validation/index.ts
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/validation/index.ts
@@ -4,4 +4,5 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+export * from './datasets';
 export * from './log_entry_rate_indices';
diff --git a/x-pack/plugins/infra/common/log_analysis/job_parameters.ts b/x-pack/plugins/infra/common/log_analysis/job_parameters.ts
index 94643e21f1ea6..7e10e45bbae4d 100644
--- a/x-pack/plugins/infra/common/log_analysis/job_parameters.ts
+++ b/x-pack/plugins/infra/common/log_analysis/job_parameters.ts
@@ -21,17 +21,73 @@ export const getJobId = (spaceId: string, sourceId: string, jobType: string) =>
 export const getDatafeedId = (spaceId: string, sourceId: string, jobType: string) =>
   `datafeed-${getJobId(spaceId, sourceId, jobType)}`;
 
-export const jobSourceConfigurationRT = rt.type({
+export const datasetFilterRT = rt.union([
+  rt.strict({
+    type: rt.literal('includeAll'),
+  }),
+  rt.strict({
+    type: rt.literal('includeSome'),
+    datasets: rt.array(rt.string),
+  }),
+]);
+
+export type DatasetFilter = rt.TypeOf<typeof datasetFilterRT>;
+
+export const jobSourceConfigurationRT = rt.partial({
   indexPattern: rt.string,
   timestampField: rt.string,
   bucketSpan: rt.number,
+  datasetFilter: datasetFilterRT,
 });
 
 export type JobSourceConfiguration = rt.TypeOf<typeof jobSourceConfigurationRT>;
 
 export const jobCustomSettingsRT = rt.partial({
   job_revision: rt.number,
-  logs_source_config: rt.partial(jobSourceConfigurationRT.props),
+  logs_source_config: jobSourceConfigurationRT,
 });
 
 export type JobCustomSettings = rt.TypeOf<typeof jobCustomSettingsRT>;
+
+export const combineDatasetFilters = (
+  firstFilter: DatasetFilter,
+  secondFilter: DatasetFilter
+): DatasetFilter => {
+  if (firstFilter.type === 'includeAll' && secondFilter.type === 'includeAll') {
+    return {
+      type: 'includeAll',
+    };
+  }
+
+  const includedDatasets = new Set([
+    ...(firstFilter.type === 'includeSome' ? firstFilter.datasets : []),
+    ...(secondFilter.type === 'includeSome' ? secondFilter.datasets : []),
+  ]);
+
+  return {
+    type: 'includeSome',
+    datasets: [...includedDatasets],
+  };
+};
+
+export const filterDatasetFilter = (
+  datasetFilter: DatasetFilter,
+  predicate: (dataset: string) => boolean
+): DatasetFilter => {
+  if (datasetFilter.type === 'includeAll') {
+    return datasetFilter;
+  } else {
+    const newDatasets = datasetFilter.datasets.filter(predicate);
+
+    if (newDatasets.length > 0) {
+      return {
+        type: 'includeSome',
+        datasets: newDatasets,
+      };
+    } else {
+      return {
+        type: 'includeAll',
+      };
+    }
+  }
+};
diff --git a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/analysis_setup_indices_form.tsx b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/analysis_setup_indices_form.tsx
index 649858f657bfe..06dbf5315b83a 100644
--- a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/analysis_setup_indices_form.tsx
+++ b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/analysis_setup_indices_form.tsx
@@ -4,56 +4,41 @@
  * you may not use this file except in compliance with the Elastic License.
*/ -import { EuiCode, EuiDescribedFormGroup, EuiFormRow, EuiCheckbox, EuiToolTip } from '@elastic/eui'; +import { EuiDescribedFormGroup, EuiFormRow } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; -import React, { useCallback, useMemo } from 'react'; - +import React, { useCallback } from 'react'; import { LoadingOverlayWrapper } from '../../../loading_overlay_wrapper'; -import { ValidatedIndex, ValidationIndicesUIError } from './validation'; +import { IndexSetupRow } from './index_setup_row'; +import { AvailableIndex } from './validation'; export const AnalysisSetupIndicesForm: React.FunctionComponent<{ disabled?: boolean; - indices: ValidatedIndex[]; + indices: AvailableIndex[]; isValidating: boolean; - onChangeSelectedIndices: (selectedIndices: ValidatedIndex[]) => void; + onChangeSelectedIndices: (selectedIndices: AvailableIndex[]) => void; valid: boolean; }> = ({ disabled = false, indices, isValidating, onChangeSelectedIndices, valid }) => { - const handleCheckboxChange = useCallback( - (event: React.ChangeEvent) => { + const changeIsIndexSelected = useCallback( + (indexName: string, isSelected: boolean) => { onChangeSelectedIndices( indices.map(index => { - const checkbox = event.currentTarget; - return index.name === checkbox.id ? { ...index, isSelected: checkbox.checked } : index; + return index.name === indexName ? { ...index, isSelected } : index; }) ); }, [indices, onChangeSelectedIndices] ); - const choices = useMemo( - () => - indices.map(index => { - const checkbox = ( - {index.name}} - onChange={handleCheckboxChange} - checked={index.validity === 'valid' && index.isSelected} - disabled={disabled || index.validity === 'invalid'} - /> - ); - - return index.validity === 'valid' ? ( - checkbox - ) : ( -
- {checkbox} -
- ); - }), - [disabled, handleCheckboxChange, indices] + const changeDatasetFilter = useCallback( + (indexName: string, datasetFilter) => { + onChangeSelectedIndices( + indices.map(index => { + return index.name === indexName ? { ...index, datasetFilter } : index; + }) + ); + }, + [indices, onChangeSelectedIndices] ); return ( @@ -69,13 +54,23 @@ export const AnalysisSetupIndicesForm: React.FunctionComponent<{ description={ } > - <>{choices} + <> + {indices.map(index => ( + + ))} + @@ -85,51 +80,3 @@ export const AnalysisSetupIndicesForm: React.FunctionComponent<{ const indicesSelectionLabel = i18n.translate('xpack.infra.analysisSetup.indicesSelectionLabel', { defaultMessage: 'Indices', }); - -const formatValidationError = (errors: ValidationIndicesUIError[]): React.ReactNode => { - return errors.map(error => { - switch (error.error) { - case 'INDEX_NOT_FOUND': - return ( -

- {error.index} }} - /> -

- ); - - case 'FIELD_NOT_FOUND': - return ( -

- {error.index}, - field: {error.field}, - }} - /> -

- ); - - case 'FIELD_NOT_VALID': - return ( -

- {error.index}, - field: {error.field}, - }} - /> -

- ); - - default: - return ''; - } - }); -}; diff --git a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/index_setup_dataset_filter.tsx b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/index_setup_dataset_filter.tsx new file mode 100644 index 0000000000000..b37c68f837876 --- /dev/null +++ b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/index_setup_dataset_filter.tsx @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { + EuiFilterButton, + EuiFilterGroup, + EuiPopover, + EuiPopoverTitle, + EuiSelectable, + EuiSelectableOption, +} from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import React, { useCallback, useMemo } from 'react'; +import { DatasetFilter } from '../../../../../common/log_analysis'; +import { useVisibilityState } from '../../../../utils/use_visibility_state'; + +export const IndexSetupDatasetFilter: React.FC<{ + availableDatasets: string[]; + datasetFilter: DatasetFilter; + isDisabled?: boolean; + onChangeDatasetFilter: (datasetFilter: DatasetFilter) => void; +}> = ({ availableDatasets, datasetFilter, isDisabled, onChangeDatasetFilter }) => { + const { isVisible, hide, show } = useVisibilityState(false); + + const changeDatasetFilter = useCallback( + (options: EuiSelectableOption[]) => { + const selectedDatasets = options + .filter(({ checked }) => checked === 'on') + .map(({ label }) => label); + + onChangeDatasetFilter( + selectedDatasets.length === 0 + ? { type: 'includeAll' } + : { type: 'includeSome', datasets: selectedDatasets } + ); + }, + [onChangeDatasetFilter] + ); + + const selectableOptions: EuiSelectableOption[] = useMemo( + () => + availableDatasets.map(datasetName => ({ + label: datasetName, + checked: + datasetFilter.type === 'includeSome' && datasetFilter.datasets.includes(datasetName) + ? 'on' + : undefined, + })), + [availableDatasets, datasetFilter] + ); + + const datasetFilterButton = ( + + + + ); + + return ( + + + + {(list, search) => ( +
+ {search} + {list} +
+ )} +
+
+
+ ); +}; diff --git a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/index_setup_row.tsx b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/index_setup_row.tsx new file mode 100644 index 0000000000000..18dc2e5aa9bd1 --- /dev/null +++ b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/index_setup_row.tsx @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { EuiCheckbox, EuiCode, EuiFlexGroup, EuiFlexItem, EuiIcon, EuiToolTip } from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import React, { useCallback } from 'react'; +import { DatasetFilter } from '../../../../../common/log_analysis'; +import { IndexSetupDatasetFilter } from './index_setup_dataset_filter'; +import { AvailableIndex, ValidationIndicesUIError } from './validation'; + +export const IndexSetupRow: React.FC<{ + index: AvailableIndex; + isDisabled: boolean; + onChangeDatasetFilter: (indexName: string, datasetFilter: DatasetFilter) => void; + onChangeIsSelected: (indexName: string, isSelected: boolean) => void; +}> = ({ index, isDisabled, onChangeDatasetFilter, onChangeIsSelected }) => { + const changeIsSelected = useCallback( + (event: React.ChangeEvent) => { + onChangeIsSelected(index.name, event.currentTarget.checked); + }, + [index.name, onChangeIsSelected] + ); + + const changeDatasetFilter = useCallback( + (datasetFilter: DatasetFilter) => onChangeDatasetFilter(index.name, datasetFilter), + [index.name, onChangeDatasetFilter] + ); + + const isSelected = index.validity === 'valid' && index.isSelected; + + return ( + + + {index.name}} + onChange={changeIsSelected} + checked={isSelected} + disabled={isDisabled || index.validity === 'invalid'} + /> + + + {index.validity === 'invalid' ? ( + + + + ) : index.validity === 'valid' ? ( + + ) : null} + + + ); +}; + +const formatValidationError = (errors: ValidationIndicesUIError[]): React.ReactNode => { + return errors.map(error => { + switch (error.error) { + case 'INDEX_NOT_FOUND': + return ( +

+ {error.index} }} + /> +

+ ); + + case 'FIELD_NOT_FOUND': + return ( +

+ {error.index}, + field: {error.field}, + }} + /> +

+ ); + + case 'FIELD_NOT_VALID': + return ( +

+ {error.index}, + field: {error.field}, + }} + /> +

+ ); + + default: + return ''; + } + }); +}; diff --git a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/initial_configuration_step.tsx b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/initial_configuration_step.tsx index 4ec895dfed4bc..85aa7ce513248 100644 --- a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/initial_configuration_step.tsx +++ b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/initial_configuration_step.tsx @@ -13,7 +13,7 @@ import React, { useMemo } from 'react'; import { SetupStatus } from '../../../../../common/log_analysis'; import { AnalysisSetupIndicesForm } from './analysis_setup_indices_form'; import { AnalysisSetupTimerangeForm } from './analysis_setup_timerange_form'; -import { ValidatedIndex, ValidationIndicesUIError } from './validation'; +import { AvailableIndex, ValidationIndicesUIError } from './validation'; interface InitialConfigurationStepProps { setStartTime: (startTime: number | undefined) => void; @@ -21,9 +21,9 @@ interface InitialConfigurationStepProps { startTime: number | undefined; endTime: number | undefined; isValidating: boolean; - validatedIndices: ValidatedIndex[]; + validatedIndices: AvailableIndex[]; setupStatus: SetupStatus; - setValidatedIndices: (selectedIndices: ValidatedIndex[]) => void; + setValidatedIndices: (selectedIndices: AvailableIndex[]) => void; validationErrors?: ValidationIndicesUIError[]; } diff --git a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/validation.tsx b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/validation.tsx index 8b733f66ef4a8..d69e544aeab18 100644 --- a/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/validation.tsx +++ b/x-pack/plugins/infra/public/components/logging/log_analysis_setup/initial_configuration_step/validation.tsx @@ -5,22 +5,35 @@ */ import { ValidationIndicesError } from '../../../../../common/http_api'; +import { DatasetFilter } from '../../../../../common/log_analysis'; + +export { ValidationIndicesError }; export type ValidationIndicesUIError = | ValidationIndicesError | { error: 'NETWORK_ERROR' } | { error: 'TOO_FEW_SELECTED_INDICES' }; -interface ValidIndex { +interface ValidAvailableIndex { validity: 'valid'; name: string; isSelected: boolean; + availableDatasets: string[]; + datasetFilter: DatasetFilter; } -interface InvalidIndex { +interface InvalidAvailableIndex { validity: 'invalid'; name: string; errors: ValidationIndicesError[]; } -export type ValidatedIndex = ValidIndex | InvalidIndex; +interface UnvalidatedAvailableIndex { + validity: 'unknown'; + name: string; +} + +export type AvailableIndex = + | ValidAvailableIndex + | InvalidAvailableIndex + | UnvalidatedAvailableIndex; diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts index b1265b389917e..7c8d63374924c 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts @@ -21,7 +21,8 @@ export const callSetupMlModuleAPI = async ( sourceId: string, indexPattern: string, jobOverrides: SetupMlModuleJobOverrides[] = [], - datafeedOverrides: 
SetupMlModuleDatafeedOverrides[] = [] + datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], + query?: object ) => { const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, { method: 'POST', @@ -34,6 +35,7 @@ export const callSetupMlModuleAPI = async ( startDatafeed: true, jobOverrides, datafeedOverrides, + query, }) ), }); @@ -60,13 +62,20 @@ const setupMlModuleDatafeedOverridesRT = rt.object; export type SetupMlModuleDatafeedOverrides = rt.TypeOf; -const setupMlModuleRequestParamsRT = rt.type({ - indexPatternName: rt.string, - prefix: rt.string, - startDatafeed: rt.boolean, - jobOverrides: rt.array(setupMlModuleJobOverridesRT), - datafeedOverrides: rt.array(setupMlModuleDatafeedOverridesRT), -}); +const setupMlModuleRequestParamsRT = rt.intersection([ + rt.strict({ + indexPatternName: rt.string, + prefix: rt.string, + startDatafeed: rt.boolean, + jobOverrides: rt.array(setupMlModuleJobOverridesRT), + datafeedOverrides: rt.array(setupMlModuleDatafeedOverridesRT), + }), + rt.exact( + rt.partial({ + query: rt.object, + }) + ), +]); const setupMlModuleRequestPayloadRT = rt.intersection([ setupMlModuleTimeParamsRT, diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts new file mode 100644 index 0000000000000..6c9d5e439d359 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { + LOG_ANALYSIS_VALIDATE_DATASETS_PATH, + validateLogEntryDatasetsRequestPayloadRT, + validateLogEntryDatasetsResponsePayloadRT, +} from '../../../../../common/http_api'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; +import { npStart } from '../../../../legacy_singletons'; + +export const callValidateDatasetsAPI = async ( + indices: string[], + timestampField: string, + startTime: number, + endTime: number +) => { + const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, { + method: 'POST', + body: JSON.stringify( + validateLogEntryDatasetsRequestPayloadRT.encode({ + data: { + endTime, + indices, + startTime, + timestampField, + }, + }) + ), + }); + + return decodeOrThrow(validateLogEntryDatasetsResponsePayloadRT)(response); +}; diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx index 99c5a3df7c9b1..cecfea28100ad 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx @@ -5,7 +5,7 @@ */ import { useCallback, useMemo } from 'react'; - +import { DatasetFilter } from '../../../../common/log_analysis'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useModuleStatus } from './log_analysis_module_status'; import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; @@ -48,10 +48,11 @@ export const useLogAnalysisModule = ({ createPromise: async ( selectedIndices: string[], start: number | undefined, - end: number | undefined + end: number | undefined, + datasetFilter: DatasetFilter ) => { 
dispatchModuleStatus({ type: 'startedSetup' }); - const setupResult = await moduleDescriptor.setUpModule(start, end, { + const setupResult = await moduleDescriptor.setUpModule(start, end, datasetFilter, { indices: selectedIndices, sourceId, spaceId, @@ -92,11 +93,16 @@ export const useLogAnalysisModule = ({ ]); const cleanUpAndSetUpModule = useCallback( - (selectedIndices: string[], start: number | undefined, end: number | undefined) => { + ( + selectedIndices: string[], + start: number | undefined, + end: number | undefined, + datasetFilter: DatasetFilter + ) => { dispatchModuleStatus({ type: 'startedSetup' }); cleanUpModule() .then(() => { - setUpModule(selectedIndices, start, end); + setUpModule(selectedIndices, start, end, datasetFilter); }) .catch(() => { dispatchModuleStatus({ type: 'failedSetup' }); diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts index dc9f25b492635..cc9ef73019844 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts @@ -8,7 +8,11 @@ import { DeleteJobsResponsePayload } from './api/ml_cleanup'; import { FetchJobStatusResponsePayload } from './api/ml_get_jobs_summary_api'; import { GetMlModuleResponsePayload } from './api/ml_get_module'; import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api'; -import { ValidationIndicesResponsePayload } from '../../../../common/http_api/log_analysis'; +import { + ValidationIndicesResponsePayload, + ValidateLogEntryDatasetsResponsePayload, +} from '../../../../common/http_api/log_analysis'; +import { DatasetFilter } from '../../../../common/log_analysis'; export interface ModuleDescriptor { moduleId: string; @@ -20,12 +24,20 @@ export interface ModuleDescriptor { setUpModule: ( start: number | undefined, end: number | undefined, + datasetFilter: DatasetFilter, sourceConfiguration: ModuleSourceConfiguration ) => Promise; cleanUpModule: (spaceId: string, sourceId: string) => Promise; validateSetupIndices: ( - sourceConfiguration: ModuleSourceConfiguration + indices: string[], + timestampField: string ) => Promise; + validateSetupDatasets: ( + indices: string[], + timestampField: string, + startTime: number, + endTime: number + ) => Promise; } export interface ModuleSourceConfiguration { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts new file mode 100644 index 0000000000000..d46e8bc2485f6 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts @@ -0,0 +1,264 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { isEqual } from 'lodash'; +import { useCallback, useEffect, useMemo, useState } from 'react'; +import { usePrevious } from 'react-use'; +import { + combineDatasetFilters, + DatasetFilter, + filterDatasetFilter, + isExampleDataIndex, +} from '../../../../common/log_analysis'; +import { + AvailableIndex, + ValidationIndicesError, + ValidationIndicesUIError, +} from '../../../components/logging/log_analysis_setup/initial_configuration_step'; +import { useTrackedPromise } from '../../../utils/use_tracked_promise'; +import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; + +type SetupHandler = ( + indices: string[], + startTime: number | undefined, + endTime: number | undefined, + datasetFilter: DatasetFilter +) => void; + +interface AnalysisSetupStateArguments { + cleanUpAndSetUpModule: SetupHandler; + moduleDescriptor: ModuleDescriptor; + setUpModule: SetupHandler; + sourceConfiguration: ModuleSourceConfiguration; +} + +const fourWeeksInMs = 86400000 * 7 * 4; + +export const useAnalysisSetupState = ({ + cleanUpAndSetUpModule, + moduleDescriptor: { validateSetupDatasets, validateSetupIndices }, + setUpModule, + sourceConfiguration, +}: AnalysisSetupStateArguments) => { + const [startTime, setStartTime] = useState(Date.now() - fourWeeksInMs); + const [endTime, setEndTime] = useState(undefined); + + const [validatedIndices, setValidatedIndices] = useState( + sourceConfiguration.indices.map(indexName => ({ + name: indexName, + validity: 'unknown' as const, + })) + ); + + const updateIndicesWithValidationErrors = useCallback( + (validationErrors: ValidationIndicesError[]) => + setValidatedIndices(availableIndices => + availableIndices.map(previousAvailableIndex => { + const indexValiationErrors = validationErrors.filter( + ({ index }) => index === previousAvailableIndex.name + ); + + if (indexValiationErrors.length > 0) { + return { + validity: 'invalid', + name: previousAvailableIndex.name, + errors: indexValiationErrors, + }; + } else if (previousAvailableIndex.validity === 'valid') { + return { + ...previousAvailableIndex, + validity: 'valid', + errors: [], + }; + } else { + return { + validity: 'valid', + name: previousAvailableIndex.name, + isSelected: !isExampleDataIndex(previousAvailableIndex.name), + availableDatasets: [], + datasetFilter: { + type: 'includeAll' as const, + }, + }; + } + }) + ), + [] + ); + + const updateIndicesWithAvailableDatasets = useCallback( + (availableDatasets: Array<{ indexName: string; datasets: string[] }>) => + setValidatedIndices(availableIndices => + availableIndices.map(previousAvailableIndex => { + if (previousAvailableIndex.validity !== 'valid') { + return previousAvailableIndex; + } + + const availableDatasetsForIndex = availableDatasets.filter( + ({ indexName }) => indexName === previousAvailableIndex.name + ); + const newAvailableDatasets = availableDatasetsForIndex.flatMap( + ({ datasets }) => datasets + ); + + // filter out datasets that have disappeared if this index' datasets were updated + const newDatasetFilter: DatasetFilter = + availableDatasetsForIndex.length > 0 + ? 
filterDatasetFilter(previousAvailableIndex.datasetFilter, dataset => + newAvailableDatasets.includes(dataset) + ) + : previousAvailableIndex.datasetFilter; + + return { + ...previousAvailableIndex, + availableDatasets: newAvailableDatasets, + datasetFilter: newDatasetFilter, + }; + }) + ), + [] + ); + + const validIndexNames = useMemo( + () => validatedIndices.filter(index => index.validity === 'valid').map(index => index.name), + [validatedIndices] + ); + + const selectedIndexNames = useMemo( + () => + validatedIndices + .filter(index => index.validity === 'valid' && index.isSelected) + .map(i => i.name), + [validatedIndices] + ); + + const datasetFilter = useMemo( + () => + validatedIndices + .flatMap(validatedIndex => + validatedIndex.validity === 'valid' + ? validatedIndex.datasetFilter + : { type: 'includeAll' as const } + ) + .reduce(combineDatasetFilters, { type: 'includeAll' as const }), + [validatedIndices] + ); + + const [validateIndicesRequest, validateIndices] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + return await validateSetupIndices( + sourceConfiguration.indices, + sourceConfiguration.timestampField + ); + }, + onResolve: ({ data: { errors } }) => { + updateIndicesWithValidationErrors(errors); + }, + onReject: () => { + setValidatedIndices([]); + }, + }, + [sourceConfiguration.indices, sourceConfiguration.timestampField] + ); + + const [validateDatasetsRequest, validateDatasets] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + if (validIndexNames.length === 0) { + return { data: { datasets: [] } }; + } + + return await validateSetupDatasets( + validIndexNames, + sourceConfiguration.timestampField, + startTime ?? 0, + endTime ?? Date.now() + ); + }, + onResolve: ({ data: { datasets } }) => { + updateIndicesWithAvailableDatasets(datasets); + }, + }, + [validIndexNames, sourceConfiguration.timestampField, startTime, endTime] + ); + + const setUp = useCallback(() => { + return setUpModule(selectedIndexNames, startTime, endTime, datasetFilter); + }, [setUpModule, selectedIndexNames, startTime, endTime, datasetFilter]); + + const cleanUpAndSetUp = useCallback(() => { + return cleanUpAndSetUpModule(selectedIndexNames, startTime, endTime, datasetFilter); + }, [cleanUpAndSetUpModule, selectedIndexNames, startTime, endTime, datasetFilter]); + + const isValidating = useMemo( + () => validateIndicesRequest.state === 'pending' || validateDatasetsRequest.state === 'pending', + [validateDatasetsRequest.state, validateIndicesRequest.state] + ); + + const validationErrors = useMemo(() => { + if (isValidating) { + return []; + } + + if (validateIndicesRequest.state === 'rejected') { + return [{ error: 'NETWORK_ERROR' }]; + } + + if (selectedIndexNames.length === 0) { + return [{ error: 'TOO_FEW_SELECTED_INDICES' }]; + } + + return validatedIndices.reduce((errors, index) => { + return index.validity === 'invalid' && selectedIndexNames.includes(index.name) + ? 
[...errors, ...index.errors] + : errors; + }, []); + }, [isValidating, validateIndicesRequest.state, selectedIndexNames, validatedIndices]); + + const prevStartTime = usePrevious(startTime); + const prevEndTime = usePrevious(endTime); + const prevValidIndexNames = usePrevious(validIndexNames); + + useEffect(() => { + validateIndices(); + }, [validateIndices]); + + useEffect(() => { + if ( + startTime !== prevStartTime || + endTime !== prevEndTime || + !isEqual(validIndexNames, prevValidIndexNames) + ) { + validateDatasets(); + } + }, [ + endTime, + prevEndTime, + prevStartTime, + prevValidIndexNames, + startTime, + validIndexNames, + validateDatasets, + ]); + + return { + cleanUpAndSetUp, + datasetFilter, + endTime, + isValidating, + selectedIndexNames, + setEndTime, + setStartTime, + setUp, + startTime, + validatedIndices, + setValidatedIndices, + validationErrors, + }; +}; diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.tsx deleted file mode 100644 index 9f966ed3342e6..0000000000000 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.tsx +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { useCallback, useEffect, useMemo, useState } from 'react'; - -import { isExampleDataIndex } from '../../../../common/log_analysis'; -import { - ValidatedIndex, - ValidationIndicesUIError, -} from '../../../components/logging/log_analysis_setup/initial_configuration_step'; -import { useTrackedPromise } from '../../../utils/use_tracked_promise'; -import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; - -type SetupHandler = ( - indices: string[], - startTime: number | undefined, - endTime: number | undefined -) => void; - -interface AnalysisSetupStateArguments { - cleanUpAndSetUpModule: SetupHandler; - moduleDescriptor: ModuleDescriptor; - setUpModule: SetupHandler; - sourceConfiguration: ModuleSourceConfiguration; -} - -const fourWeeksInMs = 86400000 * 7 * 4; - -export const useAnalysisSetupState = ({ - cleanUpAndSetUpModule, - moduleDescriptor: { validateSetupIndices }, - setUpModule, - sourceConfiguration, -}: AnalysisSetupStateArguments) => { - const [startTime, setStartTime] = useState(Date.now() - fourWeeksInMs); - const [endTime, setEndTime] = useState(undefined); - - const [validatedIndices, setValidatedIndices] = useState([]); - - const [validateIndicesRequest, validateIndices] = useTrackedPromise( - { - cancelPreviousOn: 'resolution', - createPromise: async () => { - return await validateSetupIndices(sourceConfiguration); - }, - onResolve: ({ data: { errors } }) => { - setValidatedIndices(previousValidatedIndices => - sourceConfiguration.indices.map(indexName => { - const previousValidatedIndex = previousValidatedIndices.filter( - ({ name }) => name === indexName - )[0]; - const indexValiationErrors = errors.filter(({ index }) => index === indexName); - if (indexValiationErrors.length > 0) { - return { - validity: 'invalid', - name: indexName, - errors: indexValiationErrors, - }; - } else { - return { - validity: 'valid', - name: indexName, - isSelected: - previousValidatedIndex?.validity === 'valid' - ? 
previousValidatedIndex?.isSelected - : !isExampleDataIndex(indexName), - }; - } - }) - ); - }, - onReject: () => { - setValidatedIndices([]); - }, - }, - [sourceConfiguration.indices] - ); - - useEffect(() => { - validateIndices(); - }, [validateIndices]); - - const selectedIndexNames = useMemo( - () => - validatedIndices - .filter(index => index.validity === 'valid' && index.isSelected) - .map(i => i.name), - [validatedIndices] - ); - - const setUp = useCallback(() => { - return setUpModule(selectedIndexNames, startTime, endTime); - }, [setUpModule, selectedIndexNames, startTime, endTime]); - - const cleanUpAndSetUp = useCallback(() => { - return cleanUpAndSetUpModule(selectedIndexNames, startTime, endTime); - }, [cleanUpAndSetUpModule, selectedIndexNames, startTime, endTime]); - - const isValidating = useMemo( - () => - validateIndicesRequest.state === 'pending' || - validateIndicesRequest.state === 'uninitialized', - [validateIndicesRequest.state] - ); - - const validationErrors = useMemo(() => { - if (isValidating) { - return []; - } - - if (validateIndicesRequest.state === 'rejected') { - return [{ error: 'NETWORK_ERROR' }]; - } - - if (selectedIndexNames.length === 0) { - return [{ error: 'TOO_FEW_SELECTED_INDICES' }]; - } - - return validatedIndices.reduce((errors, index) => { - return index.validity === 'invalid' && selectedIndexNames.includes(index.name) - ? [...errors, ...index.errors] - : errors; - }, []); - }, [isValidating, validateIndicesRequest.state, selectedIndexNames, validatedIndices]); - - return { - cleanUpAndSetUp, - endTime, - isValidating, - selectedIndexNames, - setEndTime, - setStartTime, - setUp, - startTime, - validatedIndices, - setValidatedIndices, - validationErrors, - }; -}; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/module_descriptor.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/module_descriptor.ts index be7547f2e74cb..45cdd28bd943b 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/module_descriptor.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/module_descriptor.ts @@ -7,20 +7,21 @@ import { bucketSpan, categoriesMessageField, + DatasetFilter, getJobId, LogEntryCategoriesJobType, logEntryCategoriesJobTypes, partitionField, } from '../../../../common/log_analysis'; - import { + cleanUpJobsAndDatafeeds, ModuleDescriptor, ModuleSourceConfiguration, - cleanUpJobsAndDatafeeds, } from '../../../containers/logs/log_analysis'; import { callJobsSummaryAPI } from '../../../containers/logs/log_analysis/api/ml_get_jobs_summary_api'; import { callGetMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_get_module'; import { callSetupMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_setup_module_api'; +import { callValidateDatasetsAPI } from '../../../containers/logs/log_analysis/api/validate_datasets'; import { callValidateIndicesAPI } from '../../../containers/logs/log_analysis/api/validate_indices'; const moduleId = 'logs_ui_categories'; @@ -48,6 +49,7 @@ const getModuleDefinition = async () => { const setUpModule = async ( start: number | undefined, end: number | undefined, + datasetFilter: DatasetFilter, { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration ) => { const indexNamePattern = indices.join(','); @@ -65,10 +67,31 @@ const setUpModule = async ( indexPattern: indexNamePattern, timestampField, bucketSpan, + datasetFilter, }, }, }, ]; + const query = { + bool: { + filter: [ + ...(datasetFilter.type === 'includeSome' + ? 
[ + { + terms: { + 'event.dataset': datasetFilter.datasets, + }, + }, + ] + : []), + { + exists: { + field: 'message', + }, + }, + ], + }, + }; return callSetupMlModuleAPI( moduleId, @@ -77,7 +100,9 @@ const setUpModule = async ( spaceId, sourceId, indexNamePattern, - jobOverrides + jobOverrides, + [], + query ); }; @@ -85,7 +110,7 @@ const cleanUpModule = async (spaceId: string, sourceId: string) => { return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes); }; -const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceConfiguration) => { +const validateSetupIndices = async (indices: string[], timestampField: string) => { return await callValidateIndicesAPI(indices, [ { name: timestampField, @@ -102,6 +127,15 @@ const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceCon ]); }; +const validateSetupDatasets = async ( + indices: string[], + timestampField: string, + startTime: number, + endTime: number +) => { + return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); +}; + export const logEntryCategoriesModule: ModuleDescriptor = { moduleId, jobTypes: logEntryCategoriesJobTypes, @@ -111,5 +145,6 @@ export const logEntryCategoriesModule: ModuleDescriptor { createProcessStep({ cleanUpAndSetUp, errorMessages: lastSetupErrorMessages, - isConfigurationValid: validationErrors.length <= 0, + isConfigurationValid: validationErrors.length <= 0 && !isValidating, setUp, setupStatus, viewResults, diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/module_descriptor.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/module_descriptor.ts index 52ba3101dbc38..dfd427138aaa6 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/module_descriptor.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/module_descriptor.ts @@ -6,20 +6,21 @@ import { bucketSpan, + DatasetFilter, getJobId, LogEntryRateJobType, logEntryRateJobTypes, partitionField, } from '../../../../common/log_analysis'; - import { + cleanUpJobsAndDatafeeds, ModuleDescriptor, ModuleSourceConfiguration, - cleanUpJobsAndDatafeeds, } from '../../../containers/logs/log_analysis'; import { callJobsSummaryAPI } from '../../../containers/logs/log_analysis/api/ml_get_jobs_summary_api'; import { callGetMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_get_module'; import { callSetupMlModuleAPI } from '../../../containers/logs/log_analysis/api/ml_setup_module_api'; +import { callValidateDatasetsAPI } from '../../../containers/logs/log_analysis/api/validate_datasets'; import { callValidateIndicesAPI } from '../../../containers/logs/log_analysis/api/validate_indices'; const moduleId = 'logs_ui_analysis'; @@ -47,6 +48,7 @@ const getModuleDefinition = async () => { const setUpModule = async ( start: number | undefined, end: number | undefined, + datasetFilter: DatasetFilter, { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration ) => { const indexNamePattern = indices.join(','); @@ -68,6 +70,20 @@ const setUpModule = async ( }, }, ]; + const query = + datasetFilter.type === 'includeSome' + ? 
{ + bool: { + filter: [ + { + terms: { + 'event.dataset': datasetFilter.datasets, + }, + }, + ], + }, + } + : undefined; return callSetupMlModuleAPI( moduleId, @@ -76,7 +92,9 @@ const setUpModule = async ( spaceId, sourceId, indexNamePattern, - jobOverrides + jobOverrides, + [], + query ); }; @@ -84,7 +102,7 @@ const cleanUpModule = async (spaceId: string, sourceId: string) => { return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes); }; -const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceConfiguration) => { +const validateSetupIndices = async (indices: string[], timestampField: string) => { return await callValidateIndicesAPI(indices, [ { name: timestampField, @@ -97,6 +115,15 @@ const validateSetupIndices = async ({ indices, timestampField }: ModuleSourceCon ]); }; +const validateSetupDatasets = async ( + indices: string[], + timestampField: string, + startTime: number, + endTime: number +) => { + return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); +}; + export const logEntryRateModule: ModuleDescriptor = { moduleId, jobTypes: logEntryRateJobTypes, @@ -106,5 +133,6 @@ export const logEntryRateModule: ModuleDescriptor = { getModuleDefinition, setUpModule, cleanUpModule, + validateSetupDatasets, validateSetupIndices, }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_setup_content.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_setup_content.tsx index a02dbfa941588..e5c439808115d 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_setup_content.tsx +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_setup_content.tsx @@ -55,7 +55,7 @@ export const LogEntryRateSetupContent: React.FunctionComponent = () => { createProcessStep({ cleanUpAndSetUp, errorMessages: lastSetupErrorMessages, - isConfigurationValid: validationErrors.length <= 0, + isConfigurationValid: validationErrors.length <= 0 && !isValidating, setUp, setupStatus, viewResults, diff --git a/x-pack/plugins/infra/server/infra_server.ts b/x-pack/plugins/infra/server/infra_server.ts index 4ed30380dc164..06135c6532d77 100644 --- a/x-pack/plugins/infra/server/infra_server.ts +++ b/x-pack/plugins/infra/server/infra_server.ts @@ -15,6 +15,7 @@ import { initGetLogEntryCategoryDatasetsRoute, initGetLogEntryCategoryExamplesRoute, initGetLogEntryRateRoute, + initValidateLogAnalysisDatasetsRoute, initValidateLogAnalysisIndicesRoute, } from './routes/log_analysis'; import { initMetricExplorerRoute } from './routes/metrics_explorer'; @@ -51,6 +52,7 @@ export const initInfraServer = (libs: InfraBackendLibs) => { initSnapshotRoute(libs); initNodeDetailsRoute(libs); initSourceRoute(libs); + initValidateLogAnalysisDatasetsRoute(libs); initValidateLogAnalysisIndicesRoute(libs); initLogEntriesRoute(libs); initLogEntriesHighlightsRoute(libs); diff --git a/x-pack/plugins/infra/server/lib/compose/kibana.ts b/x-pack/plugins/infra/server/lib/compose/kibana.ts index f100726b5b92e..fc4732b8ca474 100644 --- a/x-pack/plugins/infra/server/lib/compose/kibana.ts +++ b/x-pack/plugins/infra/server/lib/compose/kibana.ts @@ -38,6 +38,7 @@ export function compose(core: CoreSetup, config: InfraConfig, plugins: InfraServ sources, }), logEntries: new InfraLogEntriesDomain(new InfraKibanaLogEntriesAdapter(framework), { + framework, sources, }), metrics: new InfraMetricsDomain(new KibanaMetricsAdapter(framework)), diff --git a/x-pack/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts 
b/x-pack/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts index 07bc965dda77a..15bfbce6d512e 100644 --- a/x-pack/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts +++ b/x-pack/plugins/infra/server/lib/domains/log_entries_domain/log_entries_domain.ts @@ -29,6 +29,14 @@ import { Highlights, compileFormattingRules, } from './message'; +import { KibanaFramework } from '../../adapters/framework/kibana_framework_adapter'; +import { decodeOrThrow } from '../../../../common/runtime_types'; +import { + logEntryDatasetsResponseRT, + LogEntryDatasetBucket, + CompositeDatasetKey, + createLogEntryDatasetsQuery, +} from './queries/log_entry_datasets'; export interface LogEntriesParams { startTimestamp: number; @@ -51,10 +59,15 @@ export const LOG_ENTRIES_PAGE_SIZE = 200; const FIELDS_FROM_CONTEXT = ['log.file.path', 'host.name', 'container.id'] as const; +const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; + export class InfraLogEntriesDomain { constructor( private readonly adapter: LogEntriesAdapter, - private readonly libs: { sources: InfraSources } + private readonly libs: { + framework: KibanaFramework; + sources: InfraSources; + } ) {} public async getLogEntriesAround( @@ -256,6 +269,45 @@ export class InfraLogEntriesDomain { ), }; } + + public async getLogEntryDatasets( + requestContext: RequestHandlerContext, + timestampField: string, + indexName: string, + startTime: number, + endTime: number + ) { + let datasetBuckets: LogEntryDatasetBucket[] = []; + let afterLatestBatchKey: CompositeDatasetKey | undefined; + + while (true) { + const datasetsReponse = await this.libs.framework.callWithRequest( + requestContext, + 'search', + createLogEntryDatasetsQuery( + indexName, + timestampField, + startTime, + endTime, + COMPOSITE_AGGREGATION_BATCH_SIZE, + afterLatestBatchKey + ) + ); + + const { after_key: afterKey, buckets: latestBatchBuckets } = decodeOrThrow( + logEntryDatasetsResponseRT + )(datasetsReponse).aggregations.dataset_buckets; + + datasetBuckets = [...datasetBuckets, ...latestBatchBuckets]; + afterLatestBatchKey = afterKey; + + if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { + break; + } + } + + return datasetBuckets.map(({ key: { dataset } }) => dataset); + } } interface LogItemHit { diff --git a/x-pack/plugins/infra/server/lib/domains/log_entries_domain/queries/log_entry_datasets.ts b/x-pack/plugins/infra/server/lib/domains/log_entries_domain/queries/log_entry_datasets.ts new file mode 100644 index 0000000000000..1df7072904f68 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/domains/log_entries_domain/queries/log_entry_datasets.ts @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+
+import * as rt from 'io-ts';
+
+import { commonSearchSuccessResponseFieldsRT } from '../../../../utils/elasticsearch_runtime_types';
+
+export const createLogEntryDatasetsQuery = (
+  indexName: string,
+  timestampField: string,
+  startTime: number,
+  endTime: number,
+  size: number,
+  afterKey?: CompositeDatasetKey
+) => ({
+  ...defaultRequestParameters,
+  body: {
+    query: {
+      bool: {
+        filter: [
+          {
+            range: {
+              [timestampField]: {
+                gte: startTime,
+                lte: endTime,
+              },
+            },
+          },
+          {
+            exists: {
+              field: 'event.dataset',
+            },
+          },
+        ],
+      },
+    },
+    aggs: {
+      dataset_buckets: {
+        composite: {
+          after: afterKey,
+          size,
+          sources: [
+            {
+              dataset: {
+                terms: {
+                  field: 'event.dataset',
+                  order: 'asc',
+                },
+              },
+            },
+          ],
+        },
+      },
+    },
+  },
+  index: indexName,
+  size: 0,
+});
+
+const defaultRequestParameters = {
+  allowNoIndices: true,
+  ignoreUnavailable: true,
+  trackScores: false,
+  trackTotalHits: false,
+};
+
+const compositeDatasetKeyRT = rt.type({
+  dataset: rt.string,
+});
+
+export type CompositeDatasetKey = rt.TypeOf<typeof compositeDatasetKeyRT>;
+
+const logEntryDatasetBucketRT = rt.type({
+  key: compositeDatasetKeyRT,
+});
+
+export type LogEntryDatasetBucket = rt.TypeOf<typeof logEntryDatasetBucketRT>;
+
+export const logEntryDatasetsResponseRT = rt.intersection([
+  commonSearchSuccessResponseFieldsRT,
+  rt.type({
+    aggregations: rt.type({
+      dataset_buckets: rt.intersection([
+        rt.type({
+          buckets: rt.array(logEntryDatasetBucketRT),
+        }),
+        rt.partial({
+          after_key: compositeDatasetKeyRT,
+        }),
+      ]),
+    }),
+  }),
+]);
+
+export type LogEntryDatasetsResponse = rt.TypeOf<typeof logEntryDatasetsResponseRT>;
diff --git a/x-pack/plugins/infra/server/plugin.ts b/x-pack/plugins/infra/server/plugin.ts
index d4dfa60ac67a0..2dbc93668fdf8 100644
--- a/x-pack/plugins/infra/server/plugin.ts
+++ b/x-pack/plugins/infra/server/plugin.ts
@@ -119,6 +119,7 @@ export class InfraServerPlugin {
         sources,
       }),
       logEntries: new InfraLogEntriesDomain(new InfraKibanaLogEntriesAdapter(framework), {
+        framework,
         sources,
       }),
       metrics: new InfraMetricsDomain(new KibanaMetricsAdapter(framework)),
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/validation/datasets.ts b/x-pack/plugins/infra/server/routes/log_analysis/validation/datasets.ts
new file mode 100644
index 0000000000000..d772c000986fc
--- /dev/null
+++ b/x-pack/plugins/infra/server/routes/log_analysis/validation/datasets.ts
@@ -0,0 +1,69 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+import Boom from 'boom';
+
+import { InfraBackendLibs } from '../../../lib/infra_types';
+import {
+  LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
+  validateLogEntryDatasetsRequestPayloadRT,
+  validateLogEntryDatasetsResponsePayloadRT,
+} from '../../../../common/http_api';
+
+import { createValidationFunction } from '../../../../common/runtime_types';
+
+export const initValidateLogAnalysisDatasetsRoute = ({
+  framework,
+  logEntries,
+}: InfraBackendLibs) => {
+  framework.registerRoute(
+    {
+      method: 'post',
+      path: LOG_ANALYSIS_VALIDATE_DATASETS_PATH,
+      validate: {
+        body: createValidationFunction(validateLogEntryDatasetsRequestPayloadRT),
+      },
+    },
+    framework.router.handleLegacyErrors(async (requestContext, request, response) => {
+      try {
+        const {
+          data: { indices, timestampField, startTime, endTime },
+        } = request.body;
+
+        const datasets = await Promise.all(
+          indices.map(async indexName => {
+            const indexDatasets = await logEntries.getLogEntryDatasets(
+              requestContext,
+              timestampField,
+              indexName,
+              startTime,
+              endTime
+            );
+
+            return {
+              indexName,
+              datasets: indexDatasets,
+            };
+          })
+        );
+
+        return response.ok({
+          body: validateLogEntryDatasetsResponsePayloadRT.encode({ data: { datasets } }),
+        });
+      } catch (error) {
+        if (Boom.isBoom(error)) {
+          throw error;
+        }
+
+        return response.customError({
+          statusCode: error.statusCode ?? 500,
+          body: {
+            message: error.message ?? 'An unexpected error occurred',
+          },
+        });
+      }
+    })
+  );
+};
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/validation/index.ts b/x-pack/plugins/infra/server/routes/log_analysis/validation/index.ts
index 727faca69298e..10c39f9552a3a 100644
--- a/x-pack/plugins/infra/server/routes/log_analysis/validation/index.ts
+++ b/x-pack/plugins/infra/server/routes/log_analysis/validation/index.ts
@@ -4,4 +4,5 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+export * from './datasets';
 export * from './indices';
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/logs_ui_categories/ml/datafeed_log_entry_categories_count.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/logs_ui_categories/ml/datafeed_log_entry_categories_count.json
index 6e117b4de87ea..2ece259e2bb45 100644
--- a/x-pack/plugins/ml/server/models/data_recognizer/modules/logs_ui_categories/ml/datafeed_log_entry_categories_count.json
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/logs_ui_categories/ml/datafeed_log_entry_categories_count.json
@@ -1,15 +1,4 @@
 {
   "job_id": "JOB_ID",
-  "indices": ["INDEX_PATTERN_NAME"],
-  "query": {
-    "bool": {
-      "filter": [
-        {
-          "exists": {
-            "field": "message"
-          }
-        }
-      ]
-    }
-  }
+  "indices": ["INDEX_PATTERN_NAME"]
 }