Skip to content

Commit

Permalink
Merge branch 'master' into super-select
Browse files Browse the repository at this point in the history
  • Loading branch information
elasticmachine authored Jul 27, 2020
2 parents 8891761 + 6d4bb9d commit 2a67704
Show file tree
Hide file tree
Showing 46 changed files with 1,242 additions and 322 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import {
} from '../../../context/ApmPluginContext/MockApmPluginContext';

const setBreadcrumbs = jest.fn();
const changeTitle = jest.fn();

function mountBreadcrumb(route: string, params = '') {
mount(
Expand All @@ -27,6 +28,7 @@ function mountBreadcrumb(route: string, params = '') {
...mockApmPluginContextValue.core,
chrome: {
...mockApmPluginContextValue.core.chrome,
docTitle: { change: changeTitle },
setBreadcrumbs,
},
},
Expand All @@ -42,23 +44,14 @@ function mountBreadcrumb(route: string, params = '') {
}

describe('UpdateBreadcrumbs', () => {
let realDoc: Document;

beforeEach(() => {
realDoc = window.document;
(window.document as any) = {
title: 'Kibana',
};
setBreadcrumbs.mockReset();
changeTitle.mockReset();
});

afterEach(() => {
(window.document as any) = realDoc;
});

it('Homepage', () => {
it('Changes the homepage title', () => {
mountBreadcrumb('/');
expect(window.document.title).toMatchInlineSnapshot(`"APM"`);
expect(changeTitle).toHaveBeenCalledWith(['APM']);
});

it('/services/:serviceName/errors/:groupId', () => {
Expand Down Expand Up @@ -90,9 +83,13 @@ describe('UpdateBreadcrumbs', () => {
},
{ text: 'myGroupId', href: undefined },
]);
expect(window.document.title).toMatchInlineSnapshot(
`"myGroupId | Errors | opbeans-node | Services | APM"`
);
expect(changeTitle).toHaveBeenCalledWith([
'myGroupId',
'Errors',
'opbeans-node',
'Services',
'APM',
]);
});

it('/services/:serviceName/errors', () => {
Expand All @@ -104,9 +101,12 @@ describe('UpdateBreadcrumbs', () => {
{ text: 'opbeans-node', href: '#/services/opbeans-node?kuery=myKuery' },
{ text: 'Errors', href: undefined },
]);
expect(window.document.title).toMatchInlineSnapshot(
`"Errors | opbeans-node | Services | APM"`
);
expect(changeTitle).toHaveBeenCalledWith([
'Errors',
'opbeans-node',
'Services',
'APM',
]);
});

it('/services/:serviceName/transactions', () => {
Expand All @@ -118,9 +118,12 @@ describe('UpdateBreadcrumbs', () => {
{ text: 'opbeans-node', href: '#/services/opbeans-node?kuery=myKuery' },
{ text: 'Transactions', href: undefined },
]);
expect(window.document.title).toMatchInlineSnapshot(
`"Transactions | opbeans-node | Services | APM"`
);
expect(changeTitle).toHaveBeenCalledWith([
'Transactions',
'opbeans-node',
'Services',
'APM',
]);
});

it('/services/:serviceName/transactions/view?transactionName=my-transaction-name', () => {
Expand All @@ -139,8 +142,12 @@ describe('UpdateBreadcrumbs', () => {
},
{ text: 'my-transaction-name', href: undefined },
]);
expect(window.document.title).toMatchInlineSnapshot(
`"my-transaction-name | Transactions | opbeans-node | Services | APM"`
);
expect(changeTitle).toHaveBeenCalledWith([
'my-transaction-name',
'Transactions',
'opbeans-node',
'Services',
'APM',
]);
});
});
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,7 @@ interface Props {
}

/**
 * Builds the document title from breadcrumb values, most specific first
 * (e.g. "myGroupId | Errors | opbeans-node | Services | APM").
 *
 * @param breadcrumbs crumbs ordered root-first; only `value` is read.
 * @returns the reversed values joined with " | ".
 */
function getTitleFromBreadCrumbs(breadcrumbs: Breadcrumb[]): string {
  // NOTE(review): the original body contained a second, unreachable
  // `return breadcrumbs.map(...).reverse();` after this one — dead code
  // removed; runtime behavior is unchanged.
  return breadcrumbs
    .map(({ value }) => value)
    .reverse()
    .join(' | ');
}

class UpdateBreadcrumbsComponent extends React.Component<Props> {
Expand All @@ -43,7 +40,9 @@ class UpdateBreadcrumbsComponent extends React.Component<Props> {
}
);

document.title = getTitleFromBreadCrumbs(this.props.breadcrumbs);
this.props.core.chrome.docTitle.change(
getTitleFromBreadCrumbs(this.props.breadcrumbs)
);
this.props.core.chrome.setBreadcrumbs(breadcrumbs);
}

Expand Down
13 changes: 2 additions & 11 deletions x-pack/plugins/infra/server/lib/log_analysis/common.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ import {
logEntryDatasetsResponseRT,
} from './queries/log_entry_data_sets';
import { decodeOrThrow } from '../../../common/runtime_types';
import { NoLogAnalysisResultsIndexError } from './errors';
import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing';

export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: string) {
Expand Down Expand Up @@ -67,16 +66,8 @@ export async function getLogEntryDatasets(
)
);

if (logEntryDatasetsResponse._shards.total === 0) {
throw new NoLogAnalysisResultsIndexError(
`Failed to find ml indices for jobs: ${jobIds.join(', ')}.`
);
}

const {
after_key: afterKey,
buckets: latestBatchBuckets,
} = logEntryDatasetsResponse.aggregations.dataset_buckets;
const { after_key: afterKey, buckets: latestBatchBuckets = [] } =
logEntryDatasetsResponse.aggregations?.dataset_buckets ?? {};

logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets];
afterLatestBatchKey = afterKey;
Expand Down
7 changes: 0 additions & 7 deletions x-pack/plugins/infra/server/lib/log_analysis/errors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,6 @@

/* eslint-disable max-classes-per-file */

/**
 * Thrown when the ML results index for a log-analysis job cannot be found
 * (callers raise it after detecting `_shards.total === 0` on the search
 * response). Route handlers match it with `instanceof` to map it to a
 * 404 response.
 */
export class NoLogAnalysisResultsIndexError extends Error {
  constructor(message?: string) {
    super(message);
    // Restore the prototype chain — subclassing the built-in Error can lose
    // it under some compile targets, which would break the `instanceof`
    // checks the route handlers rely on.
    Object.setPrototypeOf(this, new.target.prototype);
  }
}

export class NoLogAnalysisMlJobError extends Error {
constructor(message?: string) {
super(message);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,7 @@ import {
import { startTracingSpan } from '../../../common/performance_tracing';
import { decodeOrThrow } from '../../../common/runtime_types';
import type { MlAnomalyDetectors, MlSystem } from '../../types';
import {
InsufficientLogAnalysisMlJobConfigurationError,
NoLogAnalysisResultsIndexError,
UnknownCategoryError,
} from './errors';
import { InsufficientLogAnalysisMlJobConfigurationError, UnknownCategoryError } from './errors';
import {
createLogEntryCategoriesQuery,
logEntryCategoriesResponseRT,
Expand Down Expand Up @@ -235,38 +231,33 @@ async function fetchTopLogEntryCategories(

const esSearchSpan = finalizeEsSearchSpan();

if (topLogEntryCategoriesResponse._shards.total === 0) {
throw new NoLogAnalysisResultsIndexError(
`Failed to find ml result index for job ${logEntryCategoriesCountJobId}.`
);
}

const topLogEntryCategories = topLogEntryCategoriesResponse.aggregations.terms_category_id.buckets.map(
(topCategoryBucket) => {
const maximumAnomalyScoresByDataset = topCategoryBucket.filter_record.terms_dataset.buckets.reduce<
Record<string, number>
>(
(accumulatedMaximumAnomalyScores, datasetFromRecord) => ({
...accumulatedMaximumAnomalyScores,
[datasetFromRecord.key]: datasetFromRecord.maximum_record_score.value ?? 0,
}),
{}
);

return {
categoryId: parseCategoryId(topCategoryBucket.key),
logEntryCount: topCategoryBucket.filter_model_plot.sum_actual.value ?? 0,
datasets: topCategoryBucket.filter_model_plot.terms_dataset.buckets
.map((datasetBucket) => ({
name: datasetBucket.key,
maximumAnomalyScore: maximumAnomalyScoresByDataset[datasetBucket.key] ?? 0,
}))
.sort(compareDatasetsByMaximumAnomalyScore)
.reverse(),
maximumAnomalyScore: topCategoryBucket.filter_record.maximum_record_score.value ?? 0,
};
}
);
const topLogEntryCategories =
topLogEntryCategoriesResponse.aggregations?.terms_category_id.buckets.map(
(topCategoryBucket) => {
const maximumAnomalyScoresByDataset = topCategoryBucket.filter_record.terms_dataset.buckets.reduce<
Record<string, number>
>(
(accumulatedMaximumAnomalyScores, datasetFromRecord) => ({
...accumulatedMaximumAnomalyScores,
[datasetFromRecord.key]: datasetFromRecord.maximum_record_score.value ?? 0,
}),
{}
);

return {
categoryId: parseCategoryId(topCategoryBucket.key),
logEntryCount: topCategoryBucket.filter_model_plot.sum_actual.value ?? 0,
datasets: topCategoryBucket.filter_model_plot.terms_dataset.buckets
.map((datasetBucket) => ({
name: datasetBucket.key,
maximumAnomalyScore: maximumAnomalyScoresByDataset[datasetBucket.key] ?? 0,
}))
.sort(compareDatasetsByMaximumAnomalyScore)
.reverse(),
maximumAnomalyScore: topCategoryBucket.filter_record.maximum_record_score.value ?? 0,
};
}
) ?? [];

return {
topLogEntryCategories,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,14 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { pipe } from 'fp-ts/lib/pipeable';
import { map, fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { throwErrors, createPlainError } from '../../../common/runtime_types';
import { decodeOrThrow } from '../../../common/runtime_types';
import {
logRateModelPlotResponseRT,
createLogEntryRateQuery,
LogRateModelPlotBucket,
CompositeTimestampPartitionKey,
} from './queries';
import { getJobId } from '../../../common/log_analysis';
import { NoLogAnalysisResultsIndexError } from './errors';
import type { MlSystem } from '../../types';

const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
Expand Down Expand Up @@ -50,22 +46,14 @@ export async function getLogEntryRateBuckets(
)
);

if (mlModelPlotResponse._shards.total === 0) {
throw new NoLogAnalysisResultsIndexError(
`Failed to query ml result index for job ${logRateJobId}.`
);
}

const { after_key: afterKey, buckets: latestBatchBuckets } = pipe(
logRateModelPlotResponseRT.decode(mlModelPlotResponse),
map((response) => response.aggregations.timestamp_partition_buckets),
fold(throwErrors(createPlainError), identity)
);
const { after_key: afterKey, buckets: latestBatchBuckets = [] } =
decodeOrThrow(logRateModelPlotResponseRT)(mlModelPlotResponse).aggregations
?.timestamp_partition_buckets ?? {};

mlModelPlotBuckets = [...mlModelPlotBuckets, ...latestBatchBuckets];
afterLatestBatchKey = afterKey;

if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) {
if (afterKey == null || latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) {
break;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ export type LogEntryDatasetBucket = rt.TypeOf<typeof logEntryDatasetBucketRT>;

export const logEntryDatasetsResponseRT = rt.intersection([
commonSearchSuccessResponseFieldsRT,
rt.type({
rt.partial({
aggregations: rt.type({
dataset_buckets: rt.intersection([
rt.type({
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ export const logRateModelPlotBucketRT = rt.type({

export type LogRateModelPlotBucket = rt.TypeOf<typeof logRateModelPlotBucketRT>;

export const logRateModelPlotResponseRT = rt.type({
export const logRateModelPlotResponseRT = rt.partial({
aggregations: rt.type({
timestamp_partition_buckets: rt.intersection([
rt.type({
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ export type LogEntryCategoryBucket = rt.TypeOf<typeof logEntryCategoryBucketRT>;

export const topLogEntryCategoriesResponseRT = rt.intersection([
commonSearchSuccessResponseFieldsRT,
rt.type({
rt.partial({
aggregations: rt.type({
terms_category_id: rt.type({
buckets: rt.array(logEntryCategoryBucketRT),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,7 @@ import {
} from '../../../../common/http_api/log_analysis';
import { createValidationFunction } from '../../../../common/runtime_types';
import type { InfraBackendLibs } from '../../../lib/infra_types';
import {
getLogEntryAnomaliesDatasets,
NoLogAnalysisResultsIndexError,
} from '../../../lib/log_analysis';
import { getLogEntryAnomaliesDatasets } from '../../../lib/log_analysis';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';

export const initGetLogEntryAnomaliesDatasetsRoute = ({ framework }: InfraBackendLibs) => {
Expand Down Expand Up @@ -58,10 +55,6 @@ export const initGetLogEntryAnomaliesDatasetsRoute = ({ framework }: InfraBacken
throw error;
}

if (error instanceof NoLogAnalysisResultsIndexError) {
return response.notFound({ body: { message: error.message } });
}

return response.customError({
statusCode: error.statusCode ?? 500,
body: {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,7 @@ import {
} from '../../../../common/http_api/log_analysis';
import { createValidationFunction } from '../../../../common/runtime_types';
import type { InfraBackendLibs } from '../../../lib/infra_types';
import {
getTopLogEntryCategories,
NoLogAnalysisResultsIndexError,
} from '../../../lib/log_analysis';
import { getTopLogEntryCategories } from '../../../lib/log_analysis';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';

export const initGetLogEntryCategoriesRoute = ({ framework }: InfraBackendLibs) => {
Expand Down Expand Up @@ -69,10 +66,6 @@ export const initGetLogEntryCategoriesRoute = ({ framework }: InfraBackendLibs)
throw error;
}

if (error instanceof NoLogAnalysisResultsIndexError) {
return response.notFound({ body: { message: error.message } });
}

return response.customError({
statusCode: error.statusCode ?? 500,
body: {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,7 @@ import {
} from '../../../../common/http_api/log_analysis';
import { createValidationFunction } from '../../../../common/runtime_types';
import type { InfraBackendLibs } from '../../../lib/infra_types';
import {
getLogEntryCategoryDatasets,
NoLogAnalysisResultsIndexError,
} from '../../../lib/log_analysis';
import { getLogEntryCategoryDatasets } from '../../../lib/log_analysis';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';

export const initGetLogEntryCategoryDatasetsRoute = ({ framework }: InfraBackendLibs) => {
Expand Down Expand Up @@ -58,10 +55,6 @@ export const initGetLogEntryCategoryDatasetsRoute = ({ framework }: InfraBackend
throw error;
}

if (error instanceof NoLogAnalysisResultsIndexError) {
return response.notFound({ body: { message: error.message } });
}

return response.customError({
statusCode: error.statusCode ?? 500,
body: {
Expand Down
Loading

0 comments on commit 2a67704

Please sign in to comment.