Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Fleet] parse elasticsearch.source_mode from manifest + small refactor #144464

Merged
merged 2 commits into from
Nov 3, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions x-pack/plugins/fleet/common/types/models/epm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -351,6 +351,7 @@ export interface RegistryElasticsearch {
'index_template.settings'?: estypes.IndicesIndexSettings;
'index_template.mappings'?: estypes.MappingTypeMapping;
'ingest_pipeline.name'?: string;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am curious about the usage of this key; it looks to me like it is not used anywhere.

Copy link
Contributor Author

@hop-dev hop-dev Nov 3, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@nchaulet do you mean ingest_pipeline.name? It was added as part of this PR to make our logic the same as registry info:

https://github.com/elastic/kibana/pull/126915/files

It was added to the registry 2 years ago here: elastic/package-registry#564. I am not sure it was ever implemented in Kibana (until we implemented it in the above PR to make sure we match the registry), but I am nervous about deleting it :D Should I just go for it?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@nchaulet it turns out the system package uses this functionality.

source_mode?: 'default' | 'synthetic';
}

export interface RegistryDataStreamPrivileges {
Expand Down
111 changes: 111 additions & 0 deletions x-pack/plugins/fleet/server/services/epm/archive/parse.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { parseDefaultIngestPipeline, parseDataStreamElasticsearchEntry } from './parse';
describe('parseDefaultIngestPipeline', () => {
  // Shared fixture pieces: one package, one data stream.
  const pkgKey = 'pkg-1.0.0';
  const dataStreamPath = 'stream1';
  const manifestPath = `${pkgKey}/data_stream/${dataStreamPath}/manifest.yml`;
  const pipelinePath = (fileName: string) =>
    `${pkgKey}/data_stream/${dataStreamPath}/elasticsearch/ingest_pipeline/${fileName}`;

  it('Should return undefined for stream without any elasticsearch dir', () => {
    const result = parseDefaultIngestPipeline({
      pkgKey,
      paths: [manifestPath],
      dataStreamPath,
    });
    expect(result).toEqual(undefined);
  });
  it('Should return undefined for stream with non default ingest pipeline', () => {
    const result = parseDefaultIngestPipeline({
      pkgKey,
      paths: [manifestPath, pipelinePath('someotherpipeline.yml')],
      dataStreamPath,
    });
    expect(result).toEqual(undefined);
  });
  it('Should return default for yml ingest pipeline', () => {
    const result = parseDefaultIngestPipeline({
      pkgKey,
      paths: [manifestPath, pipelinePath('default.yml')],
      dataStreamPath,
    });
    expect(result).toEqual('default');
  });
  it('Should return default for json ingest pipeline', () => {
    const result = parseDefaultIngestPipeline({
      pkgKey,
      paths: [manifestPath, pipelinePath('default.json')],
      dataStreamPath,
    });
    expect(result).toEqual('default');
  });
});

describe('parseDataStreamElasticsearchEntry', () => {
  it('Should handle empty elasticsearch', () => {
    const result = parseDataStreamElasticsearchEntry({});
    expect(result).toEqual({});
  });
  it('Should not include junk keys', () => {
    const result = parseDataStreamElasticsearchEntry({ a: 1, b: 2 });
    expect(result).toEqual({});
  });
  it('Should add index pipeline', () => {
    const result = parseDataStreamElasticsearchEntry({}, 'default');
    expect(result).toEqual({ 'ingest_pipeline.name': 'default' });
  });
  it('Should add privileges', () => {
    const privileges = { index: ['priv1'], cluster: ['priv2'] };
    const result = parseDataStreamElasticsearchEntry({ privileges });
    expect(result).toEqual({ privileges: { index: ['priv1'], cluster: ['priv2'] } });
  });
  it('Should add source_mode', () => {
    // Both accepted values pass straight through unchanged.
    for (const sourceMode of ['default', 'synthetic']) {
      const result = parseDataStreamElasticsearchEntry({ source_mode: sourceMode });
      expect(result).toEqual({ source_mode: sourceMode });
    }
  });
  it('Should add index_template mappings and expand dots', () => {
    const mappings = { dynamic: false, something: { 'dot.somethingelse': 'val' } };
    const result = parseDataStreamElasticsearchEntry({ index_template: { mappings } });
    expect(result).toEqual({
      'index_template.mappings': { dynamic: false, something: { dot: { somethingelse: 'val' } } },
    });
  });
  it('Should add index_template settings and expand dots', () => {
    const settings = {
      index: {
        codec: 'best_compression',
        'sort.field': 'monitor.id',
      },
    };
    const result = parseDataStreamElasticsearchEntry({ index_template: { settings } });
    expect(result).toEqual({
      'index_template.settings': {
        index: {
          codec: 'best_compression',
          sort: { field: 'monitor.id' },
        },
      },
    });
  });
});
99 changes: 60 additions & 39 deletions x-pack/plugins/fleet/server/services/epm/archive/parse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -295,52 +295,19 @@ export function parseAndVerifyDataStreams(
elasticsearch,
...restOfProps
} = manifest;

if (!(dataStreamTitle && type)) {
throw new PackageInvalidArchiveError(
`Invalid manifest for data stream '${dataStreamPath}': one or more fields missing of 'title', 'type'`
);
}

let ingestPipeline;
const ingestPipelinePaths = paths.filter((filePath) =>
filePath.startsWith(`${pkgKey}/data_stream/${dataStreamPath}/elasticsearch/ingest_pipeline`)
);

if (
ingestPipelinePaths.length &&
(ingestPipelinePaths.some((ingestPipelinePath) =>
ingestPipelinePath.endsWith(DEFAULT_INGEST_PIPELINE_FILE_NAME_YML)
) ||
ingestPipelinePaths.some((ingestPipelinePath) =>
ingestPipelinePath.endsWith(DEFAULT_INGEST_PIPELINE_FILE_NAME_JSON)
))
) {
ingestPipeline = DEFAULT_INGEST_PIPELINE_VALUE;
}

const ingestPipeline = parseDefaultIngestPipeline({ pkgKey, dataStreamPath, paths });
const streams = parseAndVerifyStreams(manifestStreams, dataStreamPath);

const parsedElasticsearchEntry: Record<string, any> = {};

if (ingestPipeline) {
parsedElasticsearchEntry['ingest_pipeline.name'] = DEFAULT_INGEST_PIPELINE_VALUE;
}

if (elasticsearch?.privileges) {
parsedElasticsearchEntry.privileges = elasticsearch.privileges;
}

if (elasticsearch?.index_template?.mappings) {
parsedElasticsearchEntry['index_template.mappings'] = expandDottedEntries(
elasticsearch.index_template.mappings
);
}

if (elasticsearch?.index_template?.settings) {
parsedElasticsearchEntry['index_template.settings'] = expandDottedEntries(
elasticsearch.index_template.settings
);
}
const parsedElasticsearchEntry = parseDataStreamElasticsearchEntry(
elasticsearch,
ingestPipeline
);

// Build up the stream object here so we can conditionally insert nullable fields. The package registry omits undefined
// fields, so we're mimicking that behavior here.
Expand Down Expand Up @@ -534,3 +501,57 @@ export function parseAndVerifyInputs(manifestInputs: any, location: string): Reg
}
return inputs;
}

export function parseDataStreamElasticsearchEntry(
elasticsearch: Record<string, any>,
ingestPipeline?: string
) {
const parsedElasticsearchEntry: Record<string, any> = {};

if (ingestPipeline) {
parsedElasticsearchEntry['ingest_pipeline.name'] = ingestPipeline;
}

if (elasticsearch?.privileges) {
parsedElasticsearchEntry.privileges = elasticsearch.privileges;
}

if (elasticsearch?.source_mode) {
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Source mode can only be one of two values, not sure if we should be validating here? We only seem to do high level validation

parsedElasticsearchEntry.source_mode = elasticsearch.source_mode;
}

if (elasticsearch?.index_template?.mappings) {
parsedElasticsearchEntry['index_template.mappings'] = expandDottedEntries(
elasticsearch.index_template.mappings
);
}

if (elasticsearch?.index_template?.settings) {
parsedElasticsearchEntry['index_template.settings'] = expandDottedEntries(
elasticsearch.index_template.settings
);
}

return parsedElasticsearchEntry;
}

// True when the path names one of the supported default pipeline files (yml or json).
const isDefaultPipelineFile = (pipelinePath: string): boolean =>
  [DEFAULT_INGEST_PIPELINE_FILE_NAME_YML, DEFAULT_INGEST_PIPELINE_FILE_NAME_JSON].some(
    (defaultFileName) => pipelinePath.endsWith(defaultFileName)
  );

export function parseDefaultIngestPipeline(opts: {
pkgKey: string;
paths: string[];
dataStreamPath: string;
}) {
const { pkgKey, paths, dataStreamPath } = opts;
const ingestPipelineDirPath = `${pkgKey}/data_stream/${dataStreamPath}/elasticsearch/ingest_pipeline`;
const defaultIngestPipelinePaths = paths.filter(
(pipelinePath) =>
pipelinePath.startsWith(ingestPipelineDirPath) && isDefaultPipelineFile(pipelinePath)
);

if (!defaultIngestPipelinePaths.length) return undefined;

return DEFAULT_INGEST_PIPELINE_VALUE;
}