Commit: Review feedback
rudolf committed Aug 31, 2021
1 parent 5de419f commit e330326
Showing 16 changed files with 216 additions and 100 deletions.
@@ -38,7 +38,7 @@ const createMigrator = (
kibanaVersion: '8.0.0-testing',
soMigrationsConfig: {
batchSize: 100,
- batchSizeBytes: new ByteSizeValue(30_000),
+ maxBatchSizeBytes: ByteSizeValue.parse('30kb'),
scrollDuration: '15m',
pollInterval: 1500,
skip: false,
@@ -397,7 +397,7 @@ const mockOptions = ({ enableV2 }: { enableV2: boolean } = { enableV2: false })
} as KibanaMigratorOptions['kibanaConfig'],
soMigrationsConfig: {
batchSize: 20,
- batchSizeBytes: new ByteSizeValue(20_000),
+ maxBatchSizeBytes: ByteSizeValue.parse('20mb'),
pollInterval: 20000,
scrollDuration: '10m',
skip: false,
@@ -22,7 +22,7 @@ describe('createInitialState', () => {
const migrationsConfig = ({
retryAttempts: 15,
batchSize: 1000,
- batchSizeBytes: new ByteSizeValue(1e8),
+ maxBatchSizeBytes: ByteSizeValue.parse('100mb'),
} as unknown) as SavedObjectsMigrationConfigType;
it('creates the initial state for the model based on the passed in parameters', () => {
expect(
@@ -39,7 +39,7 @@ describe('createInitialState', () => {
})
).toEqual({
batchSize: 1000,
- batchSizeBytes: new ByteSizeValue(ByteSizeValue.parse('100mb')),
+ maxBatchSizeBytes: ByteSizeValue.parse('100mb').getValueInBytes(),
controlState: 'INIT',
currentAlias: '.kibana_task_manager',
excludeFromUpgradeFilterHooks: {},
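A note on the ByteSizeValue changes above: config values are now declared as human-readable sizes via `ByteSizeValue.parse()`, while the migration state stores the unwrapped number, which is why the expected value becomes `ByteSizeValue.parse('100mb').getValueInBytes()`. A minimal sketch of the distinction; the `@kbn/config-schema` import path and the 1024-based units are assumptions, not confirmed by this diff:

```ts
import { ByteSizeValue } from '@kbn/config-schema'; // assumed import path

// parse() turns a human-readable size into a ByteSizeValue instance...
const maxBatchSizeBytes = ByteSizeValue.parse('100mb');

// ...and getValueInBytes() unwraps it to a plain number
// (assuming 'mb' means 1024 * 1024 bytes here).
const bytes: number = maxBatchSizeBytes.getValueInBytes(); // 104857600

// The old expectation nested one ByteSizeValue inside another,
// new ByteSizeValue(ByteSizeValue.parse('100mb')), which is the bug fixed above.
```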
@@ -82,7 +82,7 @@ export const createInitialState = ({
retryDelay: 0,
retryAttempts: migrationsConfig.retryAttempts,
batchSize: migrationsConfig.batchSize,
- batchSizeBytes: migrationsConfig.batchSizeBytes.getValueInBytes(),
+ maxBatchSizeBytes: migrationsConfig.maxBatchSizeBytes.getValueInBytes(),
logs: [],
unusedTypesQuery: excludeUnusedTypesQuery,
knownTypes,
@@ -16,10 +16,12 @@ import { ElasticsearchClient } from '../../../elasticsearch';
import { Env } from '@kbn/config';
import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../../../config/mocks';
+ import { retryAsync } from '../test_helpers/retry_async';
+ import { LogRecord } from '@kbn/logging';

const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
const targetIndex = `.kibana_${kibanaVersion}_001`;
- const logFilePath = Path.join(__dirname, '7_13_unknown_types_test.log');
+ const logFilePath = Path.join(__dirname, '7_13_unknown_types.log');

async function removeLogFile() {
// ignore errors if it doesn't exist
@@ -68,23 +70,30 @@ describe('migration v2', () => {
await root.setup();
await root.start();

- const logFileContent = await fs.readFile(logFilePath, 'utf-8');
- const records = logFileContent
- .split('\n')
- .filter(Boolean)
- .map((str) => JSON5.parse(str));
+ let unknownDocsWarningLog: LogRecord;

- const unknownDocsWarningLog = records.find((rec) =>
- rec.message.startsWith(`[.kibana] CHECK_UNKNOWN_DOCUMENTS`)
- );
+ await retryAsync(
+ async () => {
+ const logFileContent = await fs.readFile(logFilePath, 'utf-8');
+ const records = logFileContent
+ .split('\n')
+ .filter(Boolean)
+ .map((str) => JSON5.parse(str));
+
+ unknownDocsWarningLog = records.find((rec) =>
+ rec.message.startsWith(`[.kibana] CHECK_UNKNOWN_DOCUMENTS`)
+ );

- expect(
- unknownDocsWarningLog.message.startsWith(
- '[.kibana] CHECK_UNKNOWN_DOCUMENTS Upgrades will fail for 8.0+ because documents were found for unknown saved ' +
- 'object types. To ensure that upgrades will succeed in the future, either re-enable plugins or delete ' +
- `these documents from the "${targetIndex}" index after the current upgrade completes.`
- )
- ).toBeTruthy();
+ expect(
+ unknownDocsWarningLog.message.startsWith(
+ '[.kibana] CHECK_UNKNOWN_DOCUMENTS Upgrades will fail for 8.0+ because documents were found for unknown saved ' +
+ 'object types. To ensure that upgrades will succeed in the future, either re-enable plugins or delete ' +
+ `these documents from the "${targetIndex}" index after the current upgrade completes.`
+ )
+ ).toBeTruthy();
+ },
+ { retryAttempts: 10, retryDelayMs: 200 }
+ );

const unknownDocs = [
{ type: 'space', id: 'space:default' },
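The test above (and the two below) now wraps its log-file assertions in `retryAsync`, imported from `../test_helpers/retry_async`, whose hunk is not among those loaded here. Retrying the read-and-assert step avoids the race between the test reading the log file and the logging appender flushing it to disk, which is the flakiness these changes appear to address. Based purely on the call sites, `retryAsync(fn, { retryAttempts, retryDelayMs })`, a plausible sketch (an assumption, not the committed source) looks like this:

```ts
// Hypothetical reconstruction of test_helpers/retry_async.ts; only the
// signature is taken from the call sites in this diff, the body is assumed.
export async function retryAsync<T>(
  fn: () => Promise<T>,
  options: { retryAttempts: number; retryDelayMs: number }
): Promise<T> {
  try {
    return await fn();
  } catch (e) {
    if (options.retryAttempts > 1) {
      // Back off briefly, then retry with one fewer attempt remaining.
      await new Promise((resolve) => setTimeout(resolve, options.retryDelayMs));
      return retryAsync(fn, {
        retryAttempts: options.retryAttempts - 1,
        retryDelayMs: options.retryDelayMs,
      });
    }
    throw e;
  }
}
```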
@@ -15,6 +15,8 @@ import { ElasticsearchClient } from '../../../elasticsearch';
import { Env } from '@kbn/config';
import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../../../config/mocks';
+ import { LogRecord } from '@kbn/logging';
+ import { retryAsync } from '../test_helpers/retry_async';

const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
const targetIndex = `.kibana_${kibanaVersion}_001`;
@@ -59,7 +61,7 @@ describe('migration v2', () => {
});

it('completes the migration even when a full batch would exceed ES http.max_content_length', async () => {
- root = createRoot({ batchSizeBytes: 1715275 });
+ root = createRoot({ maxBatchSizeBytes: 1715275 });
esServer = await startES();
await root.preboot();
await root.setup();
@@ -80,39 +82,43 @@ describe('migration v2', () => {
expect(migratedIndexResponse.body.count).toBeGreaterThanOrEqual(oldIndexResponse.body.count);
});

- it('fails with a descriptive message when a single document exceeds batchSizeBytes', async () => {
- root = createRoot({ batchSizeBytes: 1015275 });
+ it('fails with a descriptive message when a single document exceeds maxBatchSizeBytes', async () => {
+ root = createRoot({ maxBatchSizeBytes: 1015275 });
esServer = await startES();
await root.preboot();
await root.setup();
await expect(root.start()).rejects.toMatchInlineSnapshot(
- `[Error: Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which equals or exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the migrations.batchSizeBytes Kibana configuration option and ensure that the Elasticsearch http.max_content_length configuration option is set to an equal or larger value.]`
+ `[Error: Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.]`
);

- const logFileContent = await fs.readFile(logFilePath, 'utf-8');
- const records = logFileContent
- .split('\n')
- .filter(Boolean)
- .map((str) => JSON5.parse(str)) as LogRecord[];
-
- expect(
- records.find((rec) =>
- rec.message.startsWith(
- `Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which equals or exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the migrations.batchSizeBytes Kibana configuration option and ensure that the Elasticsearch http.max_content_length configuration option is set to an equal or larger value.`
- )
- )
- ).toBeDefined();
+ await retryAsync(
+ async () => {
+ const logFileContent = await fs.readFile(logFilePath, 'utf-8');
+ const records = logFileContent
+ .split('\n')
+ .filter(Boolean)
+ .map((str) => JSON5.parse(str)) as LogRecord[];
+ expect(
+ records.find((rec) =>
+ rec.message.startsWith(
+ `Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.`
+ )
+ )
+ ).toBeDefined();
+ },
+ { retryAttempts: 10, retryDelayMs: 200 }
+ );
});
});

- function createRoot(options: { batchSizeBytes?: number }) {
+ function createRoot(options: { maxBatchSizeBytes?: number }) {
return kbnTestServer.createRootWithCorePlugins(
{
migrations: {
skip: false,
enableV2: true,
batchSize: 1000,
- batchSizeBytes: options.batchSizeBytes,
+ maxBatchSizeBytes: options.maxBatchSizeBytes,
},
logging: {
appenders: {
@@ -11,6 +11,7 @@ import fs from 'fs/promises';
import JSON5 from 'json5';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
import { Root } from '../../../root';
+ import { retryAsync } from '../test_helpers/retry_async';

const logFilePath = Path.join(__dirname, 'batch_size_bytes_exceeds_es_content_length.log');

@@ -52,39 +53,44 @@ describe('migration v2', () => {
await new Promise((resolve) => setTimeout(resolve, 10000));
});

- it('fails with a descriptive message when batchSizeBytes exceeds ES http.max_content_length', async () => {
- root = createRoot({ batchSizeBytes: 1715275 });
+ it('fails with a descriptive message when maxBatchSizeBytes exceeds ES http.max_content_length', async () => {
+ root = createRoot({ maxBatchSizeBytes: 1715275 });
esServer = await startES();
await root.preboot();
await root.setup();
await expect(root.start()).rejects.toMatchInlineSnapshot(
- `[Error: Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.batchSize' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.]`
+ `[Error: Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.]`
);

- const logFileContent = await fs.readFile(logFilePath, 'utf-8');
- const records = logFileContent
- .split('\n')
- .filter(Boolean)
- .map((str) => JSON5.parse(str)) as any[];
+ await retryAsync(
+ async () => {
+ const logFileContent = await fs.readFile(logFilePath, 'utf-8');
+ const records = logFileContent
+ .split('\n')
+ .filter(Boolean)
+ .map((str) => JSON5.parse(str)) as any[];

- expect(
- records.find((rec) =>
- rec.message.startsWith(
- `Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.batchSize' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.`
- )
- )
- ).toBeDefined();
+ expect(
+ records.find((rec) =>
+ rec.message.startsWith(
+ `Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.`
+ )
+ )
+ ).toBeDefined();
+ },
+ { retryAttempts: 10, retryDelayMs: 200 }
+ );
});
});

- function createRoot(options: { batchSizeBytes?: number }) {
+ function createRoot(options: { maxBatchSizeBytes?: number }) {
return kbnTestServer.createRootWithCorePlugins(
{
migrations: {
skip: false,
enableV2: true,
batchSize: 1000,
- batchSizeBytes: options.batchSizeBytes,
+ maxBatchSizeBytes: options.maxBatchSizeBytes,
},
logging: {
appenders: {
@@ -41,7 +41,7 @@ describe('migrationsStateActionMachine', () => {
indexPrefix: '.my-so-index',
migrationsConfig: {
batchSize: 1000,
- batchSizeBytes: new ByteSizeValue(1e8),
+ maxBatchSizeBytes: new ByteSizeValue(1e8),
pollInterval: 0,
scrollDuration: '0s',
skip: false,
@@ -260,7 +260,7 @@ describe('migrationsStateActionMachine', () => {
kibana: {
migrationState: {
batchSize: 1000,
- batchSizeBytes: 1e8,
+ maxBatchSizeBytes: 1e8,
controlState: 'LEGACY_DELETE',
currentAlias: '.my-so-index',
excludeFromUpgradeFilterHooks: {},
@@ -309,7 +309,7 @@ describe('migrationsStateActionMachine', () => {
kibana: {
migrationState: {
batchSize: 1000,
- batchSizeBytes: 1e8,
+ maxBatchSizeBytes: 1e8,
controlState: 'FATAL',
currentAlias: '.my-so-index',
excludeFromUpgradeFilterHooks: {},
@@ -454,7 +454,7 @@ describe('migrationsStateActionMachine', () => {
kibana: {
migrationState: {
batchSize: 1000,
- batchSizeBytes: 1e8,
+ maxBatchSizeBytes: 1e8,
controlState: 'LEGACY_REINDEX',
currentAlias: '.my-so-index',
excludeFromUpgradeFilterHooks: {},
@@ -497,7 +497,7 @@ describe('migrationsStateActionMachine', () => {
kibana: {
migrationState: {
batchSize: 1000,
- batchSizeBytes: 1e8,
+ maxBatchSizeBytes: 1e8,
controlState: 'LEGACY_DELETE',
currentAlias: '.my-so-index',
excludeFromUpgradeFilterHooks: {},
@@ -10,28 +10,29 @@ import { SavedObjectsRawDoc } from '../../serialization';
import { createBatches } from './create_batches';

describe('createBatches', () => {
- const DOCUMENT_SIZE_BYTES = 126;
- const NEWLINE_SIZE_BYTES = 1;
+ const DOCUMENT_SIZE_BYTES = 128;
const INDEX = '.kibana_version_index';
it('returns right one batch if all documents fit in maxBatchSizeBytes', () => {
const documents = [
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 1' } },
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 2' } },
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 3' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ¹' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ²' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ®' } },
];

- expect(createBatches(documents, INDEX, DOCUMENT_SIZE_BYTES * 3 + NEWLINE_SIZE_BYTES)).toEqual(
+ expect(createBatches(documents, INDEX, DOCUMENT_SIZE_BYTES * 3)).toEqual(
Either.right([documents])
);
});
it('creates multiple batches with each batch limited to maxBatchSizeBytes', () => {
const documents = [
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 1' } },
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 2' } },
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 3' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ¹' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ²' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ®' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title 44' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title 55' } },
];
- expect(createBatches(documents, INDEX, DOCUMENT_SIZE_BYTES + NEWLINE_SIZE_BYTES)).toEqual(
- Either.right([[documents[0]], [documents[1]], [documents[2]]])
+ expect(createBatches(documents, INDEX, DOCUMENT_SIZE_BYTES * 2)).toEqual(
+ Either.right([[documents[0], documents[1]], [documents[2], documents[3]], [documents[4]]])
);
});
it('creates a single empty batch if there are no documents', () => {
@@ -40,20 +41,20 @@ describe('createBatches', () => {
});
it('throws if any one document exceeds the maxBatchSizeBytes', () => {
const documents = [
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 1' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ¹' } },
{
_id: '',
_source: {
type: 'dashboard',
- title: 'my saved object title 2 with a very long title that exceeds max size bytes',
+ title: 'my saved object title ² with a very long title that exceeds max size bytes',
},
},
- { _id: '', _source: { type: 'dashboard', title: 'my saved object title 3' } },
+ { _id: '', _source: { type: 'dashboard', title: 'my saved object title ®' } },
];
- expect(createBatches(documents, INDEX, DOCUMENT_SIZE_BYTES + NEWLINE_SIZE_BYTES)).toEqual(
+ expect(createBatches(documents, INDEX, 178)).toEqual(
Either.left({
- batchSizeBytes: 127,
- docSizeBytes: 178,
+ maxBatchSizeBytes: 178,
+ docSizeBytes: 179,
type: 'document_exceeds_batch_size_bytes',
document: documents[1],
})
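The updated expectations pin down the contract of `createBatches`: measure each document's serialized UTF-8 size plus one byte for the bulk-API newline (hence the multi-byte titles and the retired `NEWLINE_SIZE_BYTES` constant), pack documents greedily into batches of at most `maxBatchSizeBytes`, and return an `Either.left` as soon as any single document cannot fit. A sketch consistent with those expectations; the committed implementation in `./create_batches` may differ in detail, and the fp-ts `Either` import is assumed from the test file's style:

```ts
import * as Either from 'fp-ts/lib/Either'; // assumed: same Either the test uses
import { SavedObjectsRawDoc } from '../../serialization';

export function createBatches(
  docs: SavedObjectsRawDoc[],
  index: string, // unused in this sketch; kept for signature parity with the test
  maxBatchSizeBytes: number
) {
  const batches: SavedObjectsRawDoc[][] = [[]];
  let currBatchSizeBytes = 0;
  for (const doc of docs) {
    // Serialized size plus one byte for the newline the bulk API requires.
    const docSizeBytes = Buffer.byteLength(JSON.stringify(doc), 'utf8') + 1;
    if (docSizeBytes > maxBatchSizeBytes) {
      // A single oversized document can never be sent; fail fast with the
      // shape the test asserts on.
      return Either.left({
        type: 'document_exceeds_batch_size_bytes' as const,
        maxBatchSizeBytes,
        docSizeBytes,
        document: doc,
      });
    }
    if (currBatchSizeBytes + docSizeBytes <= maxBatchSizeBytes) {
      batches[batches.length - 1].push(doc);
      currBatchSizeBytes += docSizeBytes;
    } else {
      batches.push([doc]);
      currBatchSizeBytes = docSizeBytes;
    }
  }
  return Either.right(batches);
}
```

Checking it against the updated test values: with `DOCUMENT_SIZE_BYTES = 128` (127-byte doc plus newline), a limit of `128 * 2` packs five documents as `[[0, 1], [2, 3], [4]]`, and a 179-byte document against a 178-byte limit produces the left value asserted above.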
(diff truncated; remaining changed files were not loaded)