
Commit

Merge branch 'main' into siem-explore-issue-119024-3
kibanamachine authored Dec 21, 2021
2 parents d18437d + 33c367b commit 4baeec6
Showing 1,150 changed files with 22,420 additions and 10,136 deletions.
1 change: 1 addition & 0 deletions .backportrc.json
@@ -3,6 +3,7 @@
"targetBranchChoices": [
{ "name": "main", "checked": true },
"8.0",
"7.17",
"7.16",
"7.15",
"7.14",
34 changes: 0 additions & 34 deletions .buildkite/pipelines/flaky_tests/groups.json

This file was deleted.

31 changes: 17 additions & 14 deletions .buildkite/pipelines/flaky_tests/pipeline.js
@@ -1,7 +1,3 @@
const groups = /** @type {Array<{key: string, name: string, ciGroups: number }>} */(
require('./groups.json').groups
)

const stepInput = (key, nameOfSuite) => {
return {
key: `ftsr-suite/${key}`,
@@ -11,31 +7,38 @@ const stepInput = (key, nameOfSuite) => {
};
};

const OSS_CI_GROUPS = 12;
const XPACK_CI_GROUPS = 27;

const inputs = [
{
key: 'ftsr-override-count',
text: 'Override for all suites',
default: '0',
default: 0,
required: true,
},
];

for (const group of groups) {
if (!group.ciGroups) {
inputs.push(stepInput(group.key, group.name))
} else {
for (let i = 1; i <= group.ciGroups; i++) {
inputs.push(stepInput(`${group.key}/${i}`, `${group.name} ${i}`))
}
}
for (let i = 1; i <= OSS_CI_GROUPS; i++) {
inputs.push(stepInput(`oss/cigroup/${i}`, `OSS CI Group ${i}`));
}

inputs.push(stepInput(`oss/firefox`, 'OSS Firefox'));
inputs.push(stepInput(`oss/accessibility`, 'OSS Accessibility'));

for (let i = 1; i <= XPACK_CI_GROUPS; i++) {
inputs.push(stepInput(`xpack/cigroup/${i}`, `Default CI Group ${i}`));
}

inputs.push(stepInput(`xpack/cigroup/Docker`, 'Default CI Group Docker'));
inputs.push(stepInput(`xpack/firefox`, 'Default Firefox'));
inputs.push(stepInput(`xpack/accessibility`, 'Default Accessibility'));

const pipeline = {
steps: [
{
input: 'Number of Runs - Click Me',
fields: inputs,
if: `build.env('KIBANA_FLAKY_TEST_RUNNER_CONFIG') == null`
},
{
wait: '~',
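
For orientation, here is a minimal runnable sketch (not part of the commit) of how the hard-coded group counts above expand into one Buildkite input field per suite. Everything in stepInput beyond the `ftsr-suite/` key prefix is an assumption, since that part of the function is collapsed in this view.

// Hypothetical stand-in for stepInput: only the key prefix is taken from the diff above.
const stepInput = (key, nameOfSuite) => ({
  key: `ftsr-suite/${key}`,
  text: nameOfSuite, // assumed label field
  default: '0',      // assumed default run count
  required: false,   // assumed
});

const OSS_CI_GROUPS = 12;
const XPACK_CI_GROUPS = 27;

const inputs = [];
for (let i = 1; i <= OSS_CI_GROUPS; i++) {
  inputs.push(stepInput(`oss/cigroup/${i}`, `OSS CI Group ${i}`));
}
for (let i = 1; i <= XPACK_CI_GROUPS; i++) {
  inputs.push(stepInput(`xpack/cigroup/${i}`, `Default CI Group ${i}`));
}

console.log(inputs.length); // 39 generated fields
console.log(inputs[0].key); // "ftsr-suite/oss/cigroup/1"
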
92 changes: 22 additions & 70 deletions .buildkite/pipelines/flaky_tests/runner.js
@@ -1,85 +1,37 @@
const { execSync } = require('child_process');

const concurrency = 25;
const defaultCount = concurrency * 2;
const initialJobs = 3;

function getTestSuitesFromMetadata() {
const keys = execSync('buildkite-agent meta-data keys')
.toString()
.split('\n')
.filter((k) => k.startsWith('ftsr-suite/'));

const overrideCount = execSync(`buildkite-agent meta-data get 'ftsr-override-count'`).toString().trim();

const testSuites = [];
for (const key of keys) {
if (!key) {
continue;
}
const keys = execSync('buildkite-agent meta-data keys')
.toString()
.split('\n')
.filter((k) => k.startsWith('ftsr-suite/'));

const value =
overrideCount || execSync(`buildkite-agent meta-data get '${key}'`).toString().trim();
const overrideCount = parseInt(
execSync(`buildkite-agent meta-data get 'ftsr-override-count'`).toString().trim()
);

const count = value === '' ? defaultCount : parseInt(value);
totalJobs += count;

testSuites.push({
key: key.replace('ftsr-suite/', ''),
count: count,
});
}

return testSuites
}
const concurrency = 25;
const initialJobs = 3;

function getTestSuitesFromJson(json) {
const fail = (errorMsg) => {
console.error('+++ Invalid test config provided')
console.error(`${errorMsg}: ${json}`);
process.exit(1);
}
let totalJobs = initialJobs;

let parsed;
try {
parsed = JSON.parse(json)
} catch (error) {
fail(`JSON test config did not parse correctly`)
const testSuites = [];
for (const key of keys) {
if (!key) {
continue;
}

if (!Array.isArray(parsed)) {
fail(`JSON test config must be an array`)
}
const value =
overrideCount || execSync(`buildkite-agent meta-data get '${key}'`).toString().trim();

/** @type {Array<{ key: string, count: number }>} */
const testSuites = []
for (const item of parsed) {
if (typeof item !== 'object' || item === null) {
fail(`testSuites must be objects`)
}
const key = item.key
if (typeof key !== 'string') {
fail(`testSuite.key must be a string`)
}
const count = item.count;
if (typeof count !== 'number') {
fail(`testSuite.count must be a number`)
}
testSuites.push({
key,
count,
})
}
const count = value === '' ? defaultCount : parseInt(value);
totalJobs += count;

return testSuites
testSuites.push({
key: key.replace('ftsr-suite/', ''),
count: count,
});
}

const testSuites = process.env.KIBANA_FLAKY_TEST_RUNNER_CONFIG
? getTestSuitesFromJson(process.env.KIBANA_FLAKY_TEST_RUNNER_CONFIG)
: getTestSuitesFromMetadata();

let totalJobs = testSuites.reduce((acc, t) => acc + t.count, initialJobs);

if (totalJobs > 500) {
console.error('+++ Too many tests');
console.error(
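
For context, a minimal sketch (not from the commit) of the two things the runner works from: the JSON config accepted via KIBANA_FLAKY_TEST_RUNNER_CONFIG, which getTestSuitesFromJson expects to be an array of { key, count } objects, and the job total derived from it. The suite keys and counts below are made up for illustration.

// Hypothetical config, in the array-of-{ key, count } shape the JSON path validates.
const json = '[{"key":"oss/cigroup/1","count":2},{"key":"xpack/firefox","count":5}]';
const testSuites = JSON.parse(json);

// Mirrors the reduce() in the diff: start from the fixed overhead jobs and add each suite's count.
const initialJobs = 3;
const totalJobs = testSuites.reduce((acc, t) => acc + t.count, initialJobs);
console.log(totalJobs); // 10; the runner aborts when this exceeds 500
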
2 changes: 1 addition & 1 deletion .buildkite/scripts/build_kibana.sh
@@ -28,7 +28,7 @@ if [[ "${GITHUB_PR_LABELS:-}" == *"ci:deploy-cloud"* ]]; then
--docker-tag-qualifier="$GIT_COMMIT" \
--docker-push \
--skip-docker-ubi \
--skip-docker-centos \
--skip-docker-ubuntu \
--skip-docker-contexts

CLOUD_IMAGE=$(docker images --format "{{.Repository}}:{{.Tag}}" docker.elastic.co/kibana-ci/kibana-cloud)
2 changes: 1 addition & 1 deletion .buildkite/scripts/post_build_kibana.sh
@@ -13,5 +13,5 @@ echo "--- Upload Build Artifacts"
# Moving to `target/` first will keep `buildkite-agent` from including directories in the artifact name
cd "$KIBANA_DIR/target"
cp kibana-*-linux-x86_64.tar.gz kibana-default.tar.gz
buildkite-agent artifact upload "./*.tar.gz;./*.zip"
buildkite-agent artifact upload "./*.tar.gz;./*.zip;./*.deb;./*.rpm"
cd -
2 changes: 1 addition & 1 deletion .buildkite/scripts/steps/demo_env/kibana.sh
@@ -9,7 +9,7 @@ source "$(dirname "${0}")/config.sh"
export KIBANA_IMAGE="gcr.io/elastic-kibana-184716/demo/kibana:$DEPLOYMENT_NAME-$(git rev-parse HEAD)"

echo '--- Build Kibana'
node scripts/build --debug --docker-images --example-plugins --skip-os-packages --skip-docker-ubi
node scripts/build --debug --docker-images --example-plugins --skip-docker-ubi

echo '--- Build Docker image with example plugins'
cd target/example_plugins
4 changes: 0 additions & 4 deletions .eslintrc.js
@@ -850,10 +850,6 @@ module.exports = {
name: 'semver',
message: 'Please use "semver/*/{function}" instead',
},
{
name: '@kbn/rule-data-utils',
message: `Import directly from @kbn/rule-data-utils/* submodules in public/common code`,
},
],
},
],
8 changes: 4 additions & 4 deletions .github/CODEOWNERS
@@ -63,6 +63,8 @@
/packages/elastic-datemath/ @elastic/kibana-app-services
/packages/kbn-interpreter/ @elastic/kibana-app-services
/packages/kbn-react-field/ @elastic/kibana-app-services
/packages/kbn-es-query/ @elastic/kibana-app-services
/packages/kbn-field-types/ @elastic/kibana-app-services
/src/plugins/bfetch/ @elastic/kibana-app-services
/src/plugins/data/ @elastic/kibana-app-services
/src/plugins/data_views/ @elastic/kibana-app-services
@@ -409,15 +411,13 @@
/x-pack/plugins/security_solution/public/common/lib/endpoint*/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/public/common/components/endpoint/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/common/endpoint/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/server/endpoint/routes/trusted_apps/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/server/endpoint/routes/actions/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/server/endpoint/routes/metadata/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/server/endpoint/lib/policy/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/server/endpoint/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/server/lib/license/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/server/fleet_integration/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/scripts/endpoint/event_filters/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/plugins/security_solution/scripts/endpoint/trusted_apps/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/test/security_solution_endpoint/apps/endpoint/ @elastic/security-onboarding-and-lifecycle-mgt
/x-pack/test/security_solution_endpoint_api_int/ @elastic/security-onboarding-and-lifecycle-mgt

## Security Solution sub teams - security-telemetry (Data Engineering)
x-pack/plugins/security_solution/server/usage/ @elastic/security-telemetry
2 changes: 1 addition & 1 deletion docs/api/saved-objects.asciidoc
@@ -6,7 +6,7 @@ Manage {kib} saved objects, including dashboards, visualizations, and more.
WARNING: Do not write documents directly to the `.kibana` index. When you write directly
to the `.kibana` index, the data becomes corrupted and permanently breaks future {kib} versions.

NOTE: For managing {kib} index patterns, use the <<data-views-api, index patterns API>>.
NOTE: For managing {data-sources}, use the <<data-views-api, {data-sources} API>>.

The following saved objects APIs are available:

4 changes: 2 additions & 2 deletions docs/api/saved-objects/bulk_create.asciidoc
@@ -72,7 +72,7 @@ Saved objects that are unable to persist are replaced with an error object.
[[saved-objects-api-bulk-create-example]]
==== Example

Create an index pattern with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID:
Create {a-data-source} with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID:

[source,sh]
--------------------------------------------------
@@ -122,7 +122,7 @@ The API returns the following:
}
--------------------------------------------------

There is already a saved object with the `my-dashboard` ID, so only the index pattern is created.
There is already a saved object with the `my-dashboard` ID, so only the {data-source} is created.
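
The curl example above is collapsed in this view, so as a rough companion, here is a hypothetical sketch of the same bulk-create call from Node (assuming Node 18+ for the global fetch, a local Kibana at http://localhost:5601, and no authentication; the attribute payloads are illustrative, not taken from the commit).

(async () => {
  const res = await fetch('http://localhost:5601/api/saved_objects/_bulk_create', {
    method: 'POST',
    headers: { 'kbn-xsrf': 'true', 'Content-Type': 'application/json' },
    body: JSON.stringify([
      { type: 'index-pattern', id: 'my-pattern', attributes: { title: 'my-pattern-*' } },
      { type: 'dashboard', id: 'my-dashboard', attributes: { title: 'My dashboard' } },
    ]),
  });
  const { saved_objects } = await res.json();
  // An ID that already exists comes back as an error entry rather than being overwritten.
  console.log(saved_objects.map((o) => (o.error ? `${o.id}: conflict` : `${o.id}: created`)));
})();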

[[saved-objects-api-bulk-create-conflict-errors]]
==== Conflict errors
4 changes: 2 additions & 2 deletions docs/api/saved-objects/bulk_get.asciidoc
@@ -59,7 +59,7 @@ Saved objects that are unable to persist are replaced with an error object.
[[saved-objects-api-bulk-get-body-example]]
==== Example

Retrieve an index pattern with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID:
Retrieve a {data-source} with the `my-pattern` ID, and a dashboard with the `my-dashboard` ID:

[source,sh]
--------------------------------------------------
@@ -103,4 +103,4 @@ The API returns the following:
}
--------------------------------------------------

Only the index pattern exists.
Only the {data-source} exists.
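
Along the same lines, a hypothetical Node sketch of the bulk-get request described above (same assumptions: Node 18+ fetch, local unsecured Kibana); objects that cannot be found are reported per entry rather than failing the whole call.

(async () => {
  const res = await fetch('http://localhost:5601/api/saved_objects/_bulk_get', {
    method: 'POST',
    headers: { 'kbn-xsrf': 'true', 'Content-Type': 'application/json' },
    body: JSON.stringify([
      { type: 'index-pattern', id: 'my-pattern' },
      { type: 'dashboard', id: 'my-dashboard' },
    ]),
  });
  const { saved_objects } = await res.json();
  for (const obj of saved_objects) {
    // A missing object carries an error property instead of attributes.
    console.log(obj.id, obj.error ? 'missing' : 'found');
  }
})();
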
2 changes: 1 addition & 1 deletion docs/api/saved-objects/delete.asciidoc
@@ -43,7 +43,7 @@ TIP: Use this if you attempted to delete an object and received an HTTP 400 erro

==== Example

Delete an index pattern object with the `my-pattern` ID:
Delete {a-data-source} object with the `my-pattern` ID:

[source,sh]
--------------------------------------------------
4 changes: 2 additions & 2 deletions docs/api/saved-objects/export.asciidoc
@@ -67,7 +67,7 @@ When `excludeExportDetails=false` (the default) we append an export result detai
[[ssaved-objects-api-create-example]]
==== Examples

Export all index pattern saved objects:
Export all {data-source} saved objects:

[source,sh]
--------------------------------------------------
@@ -78,7 +78,7 @@ $ curl -X POST api/saved_objects/_export -H 'kbn-xsrf: true' -H 'Content-Type: a
--------------------------------------------------
// KIBANA

Export all index pattern saved objects and exclude the export summary from the stream:
Export all {data-source} saved objects and exclude the export summary from the stream:

[source,sh]
--------------------------------------------------
2 changes: 1 addition & 1 deletion docs/api/saved-objects/find.asciidoc
@@ -73,7 +73,7 @@ change. Use the find API for traditional paginated results, but avoid using it t

==== Examples

Find index patterns with titles that start with `my`:
Find {data-sources} with titles that start with `my`:

[source,sh]
--------------------------------------------------
2 changes: 1 addition & 1 deletion docs/api/saved-objects/get.asciidoc
@@ -35,7 +35,7 @@ experimental[] Retrieve a single {kib} saved object by ID.
[[saved-objects-api-get-example]]
==== Example

Retrieve the index pattern object with the `my-pattern` ID:
Retrieve the {data-source} object with the `my-pattern` ID:

[source,sh]
--------------------------------------------------