
Commit b3846cb
Merge branch 'master' into fix/flaky-vislib-charts-71245
elasticmachine authored Jul 21, 2020
2 parents 36895cb + c74b214 commit b3846cb
Showing 1,041 changed files with 18,277 additions and 18,884 deletions.
8 changes: 5 additions & 3 deletions .browserslistrc
@@ -1,7 +1,9 @@
 [production]
-last 2 versions
-> 5%
-Safari 7 # for PhantomJS support: https://github.com/elastic/kibana/issues/27136
+last 2 Firefox versions
+last 2 Chrome versions
+last 2 Safari versions
+> 0.25%
+not ie 11
 
 [dev]
 last 1 chrome versions
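
The updated [production] queries can be previewed locally. A quick, illustrative check (assuming the `browserslist` CLI is available through `npx`; it is not part of this commit) resolves which browsers the new queries match:

[source,bash]
----
# Resolve the browsers matched by the new production queries
npx browserslist "last 2 Firefox versions, last 2 Chrome versions, last 2 Safari versions, > 0.25%, not ie 11"
----
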
36 changes: 19 additions & 17 deletions .ci/Jenkinsfile_baseline_capture
@@ -4,23 +4,25 @@ library 'kibana-pipeline-library'
 kibanaLibrary.load()
 
 kibanaPipeline(timeoutMinutes: 120) {
-  ciStats.trackBuild {
-    catchError {
-      parallel([
-        'oss-visualRegression': {
-          workers.ci(name: 'oss-visualRegression', size: 's', ramDisk: false) {
-            kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')(1)
-          }
-        },
-        'xpack-visualRegression': {
-          workers.ci(name: 'xpack-visualRegression', size: 's', ramDisk: false) {
-            kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')(1)
-          }
-        },
-      ])
-    }
+  githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline') {
+    ciStats.trackBuild {
+      catchError {
+        parallel([
+          'oss-visualRegression': {
+            workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) {
+              kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')(1)
+            }
+          },
+          'xpack-visualRegression': {
+            workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) {
+              kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')(1)
+            }
+          },
+        ])
+      }
 
-    kibanaPipeline.sendMail()
-    slackNotifications.onFailure()
+      kibanaPipeline.sendMail()
+      slackNotifications.onFailure()
+    }
   }
 }
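
The new `githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline')` wrapper reports a `kibana-ci-baseline` commit status in addition to tracking the build. As a rough illustration only (not part of this change), the resulting status could be inspected with GitHub's commit status API:

[source,bash]
----
# List statuses for a commit and filter for the baseline context (hypothetical check)
curl -s -H "Authorization: token $GITHUB_TOKEN" \
  "https://api.github.com/repos/elastic/kibana/commits/<commit-sha>/statuses" |
  jq '.[] | select(.context == "kibana-ci-baseline") | {state, description}'
----
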
1 change: 1 addition & 0 deletions .ci/Jenkinsfile_coverage
@@ -13,6 +13,7 @@ kibanaPipeline(timeoutMinutes: 240) {
   workers.base(name: 'coverage-worker', size: 'l', ramDisk: false, bootstrapped: false) {
     catchError {
       kibanaCoverage.runTests()
+      kibanaTeamAssign.load('team_assignment', "### Upload Team Assignment JSON")
       handleIngestion(TIME_STAMP)
     }
     handleFail()
3 changes: 3 additions & 0 deletions .ci/end2end.groovy
@@ -110,6 +110,9 @@ pipeline {
         archiveArtifacts(allowEmptyArchive: true, artifacts: "${E2E_DIR}/kibana.log")
       }
     }
+    cleanup {
+      notifyBuildResult(notifyPRComment: false, analyzeFlakey: false, shouldNotify: false)
+    }
   }
 }
 
2 changes: 2 additions & 0 deletions .ci/pipeline-library/src/test/githubCommitStatus.groovy
@@ -12,6 +12,7 @@ class GithubCommitStatusTest extends KibanaBasePipelineTest {
 
   interface BuildState {
     Object get(String key)
+    Object has(String key)
   }
 
   interface GithubApi {
@@ -25,6 +26,7 @@ class GithubCommitStatusTest extends KibanaBasePipelineTest {
     buildStateMock = mock(BuildState)
     githubApiMock = mock(GithubApi)
 
+    when(buildStateMock.has('checkoutInfo')).thenReturn(true)
     when(buildStateMock.get('checkoutInfo')).thenReturn([ commit: 'COMMIT_HASH', ])
     when(githubApiMock.post(any(), any())).thenReturn(null)
 
13 changes: 13 additions & 0 deletions .ci/pipeline-library/src/test/prChanges.groovy
@@ -84,4 +84,17 @@ class PrChangesTest extends KibanaBasePipelineTest {
 
     assertFalse(prChanges.areChangesSkippable())
   }
+
+  @Test
+  void 'areChangesSkippable() with skippable changes that are in notSkippablePaths'() {
+    props([
+      githubPrs: [
+        getChanges: { [
+          [filename: 'docs/developer/architecture/code-exploration.asciidoc'],
+        ] },
+      ],
+    ])
+
+    assertFalse(prChanges.areChangesSkippable())
+  }
 }
2 changes: 1 addition & 1 deletion .eslintignore
@@ -33,7 +33,7 @@ target
 /x-pack/plugins/canvas/canvas_plugin
 /x-pack/plugins/canvas/canvas_plugin_src/lib/flot-charts
 /x-pack/plugins/canvas/shareable_runtime/build
-/x-pack/plugins/canvas/storybook
+/x-pack/plugins/canvas/storybook/build
 /x-pack/plugins/monitoring/public/lib/jquery_flot
 /x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/**
 /x-pack/legacy/plugins/infra/common/graphql/types.ts
6 changes: 6 additions & 0 deletions .eslintrc.js
@@ -1222,6 +1222,12 @@ module.exports = {
       ],
     },
   },
+  {
+    files: ['x-pack/plugins/canvas/storybook/**'],
+    rules: {
+      'import/no-extraneous-dependencies': 0,
+    },
+  },
   {
     files: ['x-pack/plugins/canvas/canvas_plugin_src/**/*.js'],
     globals: { canvas: true, $: true },
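
With this override in place, the Canvas Storybook sources lint without extraneous-dependency errors. A sketch of a targeted lint run (assuming Kibana's `scripts/eslint` wrapper, which is not introduced by this change):

[source,bash]
----
# Lint only the Canvas Storybook directory, now un-ignored and covered by the new override
node scripts/eslint x-pack/plugins/canvas/storybook
----
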
1 change: 1 addition & 0 deletions .sass-lint.yml
@@ -3,6 +3,7 @@ files:
     - 'src/legacy/core_plugins/metrics/**/*.s+(a|c)ss'
     - 'src/plugins/timelion/**/*.s+(a|c)ss'
     - 'src/plugins/vis_type_vislib/**/*.s+(a|c)ss'
+    - 'src/plugins/vis_type_vega/**/*.s+(a|c)ss'
     - 'src/plugins/vis_type_xy/**/*.s+(a|c)ss'
     - 'x-pack/plugins/canvas/**/*.s+(a|c)ss'
     - 'x-pack/plugins/triggers_actions_ui/**/*.s+(a|c)ss'
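
To spot-check the newly included pattern, something like the standalone `sass-lint` CLI can be pointed at the Vega styles (an assumption for illustration; Kibana's own lint task may differ):

[source,bash]
----
# Lint the newly covered vis_type_vega stylesheets with the repo config
npx sass-lint -c .sass-lint.yml 'src/plugins/vis_type_vega/**/*.s+(a|c)ss' -v -q
----
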
3 changes: 0 additions & 3 deletions NOTICE.txt
@@ -26,9 +26,6 @@ This module was heavily inspired by the externals plugin that ships with webpack
 MIT License http://www.opensource.org/licenses/mit-license.php
 Author Tobias Koppers @sokra
 
----
-This product has relied on ASTExplorer that is licensed under MIT.
-
 ---
 This product includes code that is based on Ace editor, which was available
 under a "BSD" license.
2 changes: 1 addition & 1 deletion docs/dev-tools/grokdebugger/index.asciidoc
@@ -32,7 +32,7 @@ in ingest node and Logstash.
 This example walks you through using the *Grok Debugger*. This tool
 is automatically enabled in {kib}.
 
-NOTE: If you're using {security}, you must have the `manage_pipeline`
+NOTE: If you're using {stack-security-features}, you must have the `manage_pipeline`
 permission to use the Grok Debugger.
 
 . Open the menu, go to *Dev Tools*, then click *Grok Debugger*.
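
For reference, the `manage_pipeline` cluster privilege mentioned above can be granted through a role; a minimal sketch using the {es} security role API (the role name is hypothetical):

[source,bash]
----
# Create a role whose members may use the Grok Debugger's pipeline APIs
curl -u elastic -X PUT "localhost:9200/_security/role/grok_debugger_user" \
  -H 'Content-Type: application/json' \
  -d '{ "cluster": ["manage_pipeline"] }'
----
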
2 changes: 1 addition & 1 deletion docs/developer/advanced/development-basepath.asciidoc
@@ -1,5 +1,5 @@
 [[development-basepath]]
-=== Considerations for basepath
+== Considerations for basepath
 
 In dev mode, {kib} by default runs behind a proxy which adds a random path component to its URL.
 
44 changes: 22 additions & 22 deletions docs/developer/advanced/development-es-snapshots.asciidoc
@@ -1,32 +1,32 @@
 [[development-es-snapshots]]
-=== Daily Elasticsearch Snapshots
+== Daily {es} Snapshots
 
-For local development and CI, {kib}, by default, uses Elasticsearch snapshots that are built daily when running tasks that require Elasticsearch (e.g. functional tests).
+For local development and CI, {kib}, by default, uses {es} snapshots that are built daily when running tasks that require {es} (e.g. functional tests).
 
-A snapshot is just a group of tarballs, one for each supported distribution/architecture/os of Elasticsearch, and a JSON-based manifest file containing metadata about the distributions.
+A snapshot is just a group of tarballs, one for each supported distribution/architecture/os of {es}, and a JSON-based manifest file containing metadata about the distributions.
 
-https://ci.kibana.dev/es-snapshots[A dashboard] is available that shows the current status and compatibility of the latest Elasticsearch snapshots.
+https://ci.kibana.dev/es-snapshots[A dashboard] is available that shows the current status and compatibility of the latest {es} snapshots.
 
-==== Process Overview
+=== Process Overview
 
-1. Elasticsearch snapshots are built for each current tracked branch of {kib}.
+1. {es} snapshots are built for each current tracked branch of {kib}.
 2. Each snapshot is uploaded to a public Google Cloud Storage bucket, `kibana-ci-es-snapshots-daily`.
 ** At this point, the snapshot is not automatically used in CI or local development. It needs to be tested/verified first.
 3. Each snapshot is tested with the latest commit of the corresponding {kib} branch, using the full CI suite.
 4. After CI
 ** If the snapshot passes, it is promoted and automatically used in CI and local development.
-** If the snapshot fails, the issue must be investigated and resolved. A new incompatibility may exist between Elasticsearch and {kib}.
+** If the snapshot fails, the issue must be investigated and resolved. A new incompatibility may exist between {es} and {kib}.
 
-==== Using the latest snapshot
+=== Using the latest snapshot
 
-When developing locally, you may wish to use the most recent Elasticsearch snapshot, even if it's failing CI. To do so, prefix your commands with the following environment variable:
+When developing locally, you may wish to use the most recent {es} snapshot, even if it's failing CI. To do so, prefix your commands with the following environment variable:
 
 ["source","bash"]
 -----------
 KBN_ES_SNAPSHOT_USE_UNVERIFIED=true
 -----------
 
-You can use this flag with any command that downloads and runs Elasticsearch snapshots, such as `scripts/es` or the FTR.
+You can use this flag with any command that downloads and runs {es} snapshots, such as `scripts/es` or the FTR.
 
 For example, to run functional tests with the latest snapshot:
 
@@ -35,7 +35,7 @@ For example, to run functional tests with the latest snapshot:
 KBN_ES_SNAPSHOT_USE_UNVERIFIED=true node scripts/functional_tests_server
 -----------
 
-===== For Pull Requests
+==== For Pull Requests
 
 Currently, there is not a way to run your pull request with the latest unverified snapshot without a code change. You can, however, do it with a small code change.
 
@@ -45,9 +45,9 @@ Currently, there is not a way to run your pull request with the latest unverifie
 
 Your pull request should then use the latest snapshot the next time that it runs. Just don't merge the change to `Jenkinsfile`!
 
-==== Google Cloud Storage buckets
+=== Google Cloud Storage buckets
 
-===== kibana-ci-es-snapshots-daily
+==== kibana-ci-es-snapshots-daily
 
 This bucket stores snapshots that are created on a daily basis, and is the primary location used by `kbn-es` to download snapshots.
 
@@ -61,7 +61,7 @@ The file structure for this bucket looks like this:
 * `<version>/archives/<unique id>/*.tar.gz.sha512`
 * `<version>/archives/<unique id>/manifest.json`
 
-===== kibana-ci-es-snapshots-permanent
+==== kibana-ci-es-snapshots-permanent
 
 This bucket stores only the most recently promoted snapshot for each version. Old snapshots are only deleted when new ones are uploaded.
 
@@ -73,18 +73,18 @@ The file structure for this bucket looks like this:
 * `<version>/*.tar.gz.sha512`
 * `<version>/manifest.json`
 
-==== How snapshots are built, tested, and promoted
+=== How snapshots are built, tested, and promoted
 
-Each day, a https://kibana-ci.elastic.co/job/elasticsearch+snapshots+trigger/[Jenkins job] runs that triggers Elasticsearch builds for each currently tracked branch/version. This job is automatically updated with the correct branches whenever we release new versions of {kib}.
+Each day, a https://kibana-ci.elastic.co/job/elasticsearch+snapshots+trigger/[Jenkins job] runs that triggers {es} builds for each currently tracked branch/version. This job is automatically updated with the correct branches whenever we release new versions of {kib}.
 
-===== Build
+==== Build
 
-https://kibana-ci.elastic.co/job/elasticsearch+snapshots+build/[This Jenkins job] builds the Elasticsearch snapshots and uploads them to GCS.
+https://kibana-ci.elastic.co/job/elasticsearch+snapshots+build/[This Jenkins job] builds the {es} snapshots and uploads them to GCS.
 
 The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_build_es[in the {kib} repo].
 
-1. Checkout Elasticsearch repo for the given branch/version.
-2. Run `./gradlew -p distribution/archives assemble --parallel` to create all of the Elasticsearch distributions.
+1. Checkout {es} repo for the given branch/version.
+2. Run `./gradlew -p distribution/archives assemble --parallel` to create all of the {es} distributions.
 3. Create a tarball for each distribution.
 4. Create a manifest JSON file containing info about the distribution, as well as its download URL.
 5. Upload the tarballs and manifest to a unique location in the GCS bucket `kibana-ci-es-snapshots-daily`.
@@ -93,9 +93,9 @@ The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/ma
 ** This allows the `KBN_ES_SNAPSHOT_USE_UNVERIFIED` flag to work.
 7. Trigger the verification job, to run the full {kib} CI test suite with this snapshot.
 
-===== Verification and Promotion
+==== Verification and Promotion
 
-https://kibana-ci.elastic.co/job/elasticsearch+snapshots+verify/[This Jenkins job] tests the latest Elasticsearch snapshot with the full {kib} CI pipeline, and promotes it if there are no test failures.
+https://kibana-ci.elastic.co/job/elasticsearch+snapshots+verify/[This Jenkins job] tests the latest {es} snapshot with the full {kib} CI pipeline, and promotes it if there are no test failures.
 
 The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_verify_es[in the {kib} repo].
 
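
As the doc above notes, the flag works with any command that downloads and runs {es} snapshots; for instance, combined with the snapshot runner described in running-elasticsearch.asciidoc below:

[source,bash]
----
# Start the most recent daily {es} snapshot locally, even if it has not been verified yet
KBN_ES_SNAPSHOT_USE_UNVERIFIED=true yarn es snapshot
----
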
6 changes: 3 additions & 3 deletions docs/developer/advanced/index.asciidoc
@@ -5,8 +5,8 @@
 * <<development-es-snapshots>>
 * <<development-basepath>>
 
-include::development-es-snapshots.asciidoc[]
+include::development-es-snapshots.asciidoc[leveloffset=+1]
 
-include::running-elasticsearch.asciidoc[]
+include::running-elasticsearch.asciidoc[leveloffset=+1]
 
-include::development-basepath.asciidoc[]
+include::development-basepath.asciidoc[leveloffset=+1]
30 changes: 15 additions & 15 deletions docs/developer/advanced/running-elasticsearch.asciidoc
@@ -1,13 +1,13 @@
 [[running-elasticsearch]]
-=== Running elasticsearch during development
+== Running {es} during development
 
-There are many ways to run Elasticsearch while you are developing.
+There are many ways to run {es} while you are developing.
 
-[float]
+[discrete]
 
-==== By snapshot
+=== By snapshot
 
-This will run a snapshot of elasticsearch that is usually built nightly. Read more about <<development-es-snapshots>>.
+This will run a snapshot of {es} that is usually built nightly. Read more about <<development-es-snapshots>>.
 
 [source,bash]
 ----
@@ -25,36 +25,36 @@ yarn es snapshot --help
 
 **Keeping data between snapshots**
 
-If you want to keep the data inside your Elasticsearch between usages of this command, you should use the following command, to keep your data folder outside the downloaded snapshot folder:
+If you want to keep the data inside your {es} between usages of this command, you should use the following command, to keep your data folder outside the downloaded snapshot folder:
 
 [source,bash]
 ----
 yarn es snapshot -E path.data=../data
 ----
 
-==== By source
+=== By source
 
-If you have the Elasticsearch repo checked out locally and wish to run against that, use `source`. By default, it will reference an elasticsearch checkout which is a sibling to the {kib} directory named elasticsearch. If you wish to use a checkout in another location you can provide that by supplying --source-path
+If you have the {es} repo checked out locally and wish to run against that, use `source`. By default, it will reference an {es} checkout which is a sibling to the {kib} directory named elasticsearch. If you wish to use a checkout in another location you can provide that by supplying --source-path
 
 [source,bash]
 ----
 yarn es source
 ----
 
-==== From an archive
+=== From an archive
 
-Use this if you already have a distributable. For released versions, one can be obtained on the Elasticsearch downloads page.
+Use this if you already have a distributable. For released versions, one can be obtained on the {es} downloads page.
 
 [source,bash]
 ----
 yarn es archive <full_path_to_archive>
 ----
 
-Each of these will run Elasticsearch with a basic license. Additional options are available, pass --help for more information.
+Each of these will run {es} with a basic license. Additional options are available, pass --help for more information.
 
-==== From a remote host
+=== From a remote host
 
-You can save some system resources, and the effort of generating sample data, if you have a remote Elasticsearch cluster to connect to. (Elasticians: you do! Check with your team about where to find credentials)
+You can save some system resources, and the effort of generating sample data, if you have a remote {es} cluster to connect to. (Elasticians: you do! Check with your team about where to find credentials)
 
 You'll need to create a kibana.dev.yml (<<customize-kibana-yml>>) and add the following to it:
 
@@ -75,7 +75,7 @@ kibana.index: '.{YourGitHubHandle}-kibana'
 xpack.task_manager.index: '.{YourGitHubHandle}-task-manager-kibana'
 ----
 
-===== Running remote clusters
+==== Running remote clusters
 
 Setup remote clusters for cross cluster search (CCS) and cross cluster replication (CCR).
 
@@ -95,7 +95,7 @@ yarn es snapshot -E transport.port=9500 -E http.port=9201 -E path.data=../data_p
 
 Once both clusters are running, start {kib}. {kib} will connect to the primary cluster.
 
-Setup the remote cluster in {kib} from either Management -> Elasticsearch -> Remote Clusters UI or by running the following script in Console.
+Setup the remote cluster in {kib} from either Management -> {es} -> Remote Clusters UI or by running the following script in Console.
 
 [source,bash]
 ----
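
For the `--source-path` option mentioned under "By source", a sketch of the invocation (the checkout path is a placeholder):

[source,bash]
----
# Run {es} from a local checkout that is not a sibling of the kibana directory
yarn es source --source-path=/path/to/elasticsearch
----
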