diff --git a/.eslintrc b/.eslintrc
index 6a291608cdf50..7a623df06a6ca 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -1,75 +1,2 @@
---
-parser: babel-eslint
-
-plugins:
- - mocha
-
-env:
- es6: true
- amd: true
- node: true
- mocha: true
- browser: true
-
-
-rules:
- block-scoped-var: 2
- camelcase: [ 2, { properties: never } ]
- comma-dangle: 0
- comma-style: [ 2, last ]
- consistent-return: 0
- curly: [ 2, multi-line ]
- dot-location: [ 2, property ]
- dot-notation: [ 2, { allowKeywords: true } ]
- eqeqeq: [ 2, allow-null ]
- guard-for-in: 2
- indent: [ 2, 2, { SwitchCase: 1 } ]
- key-spacing: [ 0, { align: value } ]
- max-len: [ 2, 140, 2, { ignoreComments: true, ignoreUrls: true } ]
- new-cap: [ 2, { capIsNewExceptions: [ Private ] } ]
- no-bitwise: 0
- no-caller: 2
- no-cond-assign: 0
- no-debugger: 2
- no-empty: 2
- no-eval: 2
- no-extend-native: 2
- no-extra-parens: 0
- no-irregular-whitespace: 2
- no-iterator: 2
- no-loop-func: 2
- no-multi-spaces: 0
- no-multi-str: 2
- no-nested-ternary: 2
- no-new: 0
- no-path-concat: 0
- no-proto: 2
- no-return-assign: 0
- no-script-url: 2
- no-sequences: 2
- no-shadow: 0
- no-trailing-spaces: 2
- no-undef: 2
- no-underscore-dangle: 0
- no-unused-expressions: 0
- no-unused-vars: 0
- no-use-before-define: [ 2, nofunc ]
- no-with: 2
- one-var: [ 2, never ]
- quotes: [ 2, single ]
- semi-spacing: [ 2, { before: false, after: true } ]
- semi: [ 2, always ]
- space-after-keywords: [ 2, always ]
- space-before-blocks: [ 2, always ]
- space-before-function-paren: [ 2, { anonymous: always, named: never } ]
- space-in-parens: [ 2, never ]
- space-infix-ops: [ 2, { int32Hint: false } ]
- space-return-throw-case: [ 2 ]
- space-unary-ops: [ 2 ]
- strict: [ 2, never ]
- valid-typeof: 2
- wrap-iife: [ 2, outside ]
- yoda: 0
-
- mocha/no-exclusive-tests: 2
- mocha/handle-done-callback: 2
+extends: '@elastic/kibana'
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000000..bc57ede80ef45
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,36 @@
+
+
+
+
+**Kibana version**:
+
+**OS version**:
+
+**Original install method (e.g. download page, yum, from source, etc.)**:
+
+**Description of the problem including expected versus actual behavior**:
+
+**Steps to reproduce**:
+ 1.
+ 2.
+ 3.
+
+**Errors in browser console (if relevant)**:
+
+**Provide logs and/or server output (if relevant)**:
+
+
+
+**Describe the feature**:
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000000..5ff8531861a06
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,13 @@
+
+
+- Have you signed the [contributor license agreement](https://www.elastic.co/contributor-agreement)?
+- Have you followed the [contributor guidelines](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md)?
+- If submitting code, have you included unit tests that cover the changes?
+- If submitting code, have you tested and built your code locally prior to submission with `npm test && npm run build`?
+- If submitting code, is your pull request against master? Unless there is a good reason otherwise, we prefer pull requests against master and will backport as needed.
diff --git a/.gitignore b/.gitignore
index 56b859e0c6c38..4f697649a07d8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,6 @@
.aws-config.json
+.signing-config.json
+.ackrc
.DS_Store
.node_binaries
node_modules
@@ -10,7 +12,10 @@ target
.idea
*.iml
*.log
-/test/output
+/test/screenshots/diff
+/test/screenshots/failure
+/test/screenshots/session
+/test/screenshots/visual_regression_gallery.html
/esvm
.htpasswd
.eslintcache
diff --git a/.node-version b/.node-version
index e2e3067ddc5fd..cbe06cdbfc24c 100644
--- a/.node-version
+++ b/.node-version
@@ -1 +1 @@
-0.12.7
+4.4.4
diff --git a/.travis.yml b/.travis.yml
index b1f556e5d3c2d..40949008f0d35 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,28 +1,21 @@
language: node_js
-node_js: '0.12.7'
+node_js: 4
+env:
+ - CXX=g++-4.8
+addons:
+ apt:
+ sources:
+ - ubuntu-toolchain-r-test
+ packages:
+ - g++-4.8
+
install:
- npm install
-script: ./node_modules/.bin/grunt travis
-sudo: false
-addons:
- firefox: "40.0"
+ - npm run setup_kibana
+
cache:
directories:
- - esvm
- - node_modules
- - selenium
-before_cache:
-- rm -rf esvm/*/logs esvm/data_dir
-before_script:
-- export DISPLAY=:99.0
-- sh -e /etc/init.d/xvfb start
-notifications:
- email:
- - rashid.khan@elastic.co
- hipchat:
- rooms:
- secure: UKrVR+5KztHarodQruQe97UJfwftutD6RNdXlVkr+oIr2GqccisDIIN9pAzS/kxl+eAnP1uT6VHzc9YI/jgbrmiSkz3DHViw+MwDwY2aIDgI8aHEbd/4B2ihtb15+OYTVbb+lytyz4+W8A8hSmbkTR/P/uFIJ+EYcBeYZfw1elo=
- format: html
- on_success: change
- template:
- - ! '%{repository_slug}/%{branch} by %{author}: %{commit_message} (open)'
+ - node_modules
+ - ../kibana
+
+script: npm test
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 58903624be8e8..1dfc15573980d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,8 +1,26 @@
-If you have a bugfix or new feature that you would like to contribute to Kibana, please **find or open an issue about it before you start working on it.** Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change.
+# Contributing to Kibana
+
+## How issues work
+At any given time the Kibana team at Elastic is working on dozens of features and enhancements to Kibana and other projects at Elastic. When you file an issue we'll take the time to digest it, consider solutions, and weigh its applicability to both the broad Kibana user base and our own goals for the project. Once we've completed that process we will assign the issue a priority.
+
+- **P1**: A high priority issue that affects almost all Kibana users. Bugs that would cause incorrect results, security issues and features that would vastly improve the user experience for everyone. Work arounds for P1s generally don't exist without a code change.
+- **P2**: A broadly applicable, high-visibility issue that enhances the usability of Kibana for a majority of users.
+- **P3**: Nice-to-have bug fixes or functionality. Work arounds for P3 items generally exist.
+- **P4**: Niche and special interest issues that may not fit our core goals. We would take a high quality pull for this if implemented in such a way that it does not meaningfully impact other functionality or existing code. Issues may also be labeled P4 if they would be better implemented in Elasticsearch.
+- **P5**: Highly niche or in opposition to our core goals. Should usually be closed. This doesn't mean we wouldn't take a pull for it, but if someone really wanted this they would be better off working on a plugin. The Kibana team will usually not work on P5 issues but may be willing to assist plugin developers on IRC.
+
+#### How to express the importance of an issue
+Let's just get this out there: **Feel free to +1 an issue**. That said, a +1 isn't a vote. We keep up on highly commented issues, but comments are but one of many reasons we might, or might not, work on an issue. A solid write up of your use case is more likely to make your case than a comment that says *+10000*.
+
+#### My issue isn't getting enough attention
+First of all, sorry about that, we want you to have a great time with Kibana! You should join us on IRC ([#kibana](https://kiwiirc.com/client/irc.freenode.net/?#kibana) on freenode) and chat about it. Github is terrible for conversations. With that out of the way, there are a number of variables that go into deciding what to work on. These include priority, impact, difficulty, applicability to use cases, and last, and importantly: What we feel like working on.
+
+### I want to help!
+**Now we're talking**. If you have a bugfix or new feature that you would like to contribute to Kibana, please **find or open an issue about it before you start working on it.** Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change.
We enjoy working with contributors to get their code accepted. There are many approaches to fixing a problem and it is important to find the best approach before writing too much code.
-The process for contributing to any of the Elasticsearch repositories is similar.
+## How to contribute code
### Sign the contributor license agreement
@@ -31,11 +49,13 @@ Please make sure you have signed the [Contributor License Agreement](http://www.
- Start elasticsearch
+ Note: you need to have a java binary in `PATH` or set `JAVA_HOME`.
+
```sh
npm run elasticsearch
```
-- Start the development server.
+- Start the development server. _On Windows, you'll need you use Git Bash, Cygwin, or a similar shell that exposes the `sh` command. And to successfully build you'll need Cygwin optional packages zip, tar, and shasum._
```sh
npm start
@@ -54,6 +74,20 @@ optimize:
lazyPrebuild: false
```
+#### SSL
+
+When Kibana runs in development mode it will automatically use bundled SSL certificates. These certificates won't be trusted by your OS by default which will likely cause your browser to complain about the cert. You can deal with this in a few ways:
+
+ 1. Supply your own cert using the `config/kibana.dev.yml` file.
+ 1. Configure your OS to trust the cert:
+ - OSX: https://www.accuweaver.com/2014/09/19/make-chrome-accept-a-self-signed-certificate-on-osx/
+ - Window: http://stackoverflow.com/a/1412118
+ - Linux: http://unix.stackexchange.com/a/90607
+ 1. Click through the warning and accept future warnings.
+ 1. Disable SSL with the `--no-ssl` flag:
+ - `npm start -- --no-ssl`
+
+
#### Linting
A note about linting: We use [eslint](http://eslint.org) to check that the [styleguide](STYLEGUIDE.md) is being followed. It runs in a pre-commit hook and as a part of the tests, but most contributors integrate it with their code editors for real-time feedback.
@@ -77,68 +111,93 @@ Before running the tests you will need to install the projects dependencies as d
Once that is complete just run:
-```sh
-npm run test && npm run build
+```sh
+npm run test && npm run build -- --skip-os-packages
```
-Distributable packages can be found in `target/` after the build completes.
-
-#### Debugging test failures
+#### Debugging unit tests
The standard `npm run test` task runs several sub tasks and can take several minutes to complete, making debugging failures pretty painful. In order to ease the pain specialized tasks provide alternate methods for running the tests.
-
-
npm run test:quick
-
Runs both server and browser tests, but skips linting
-
npm run test:server or npm run test:browser
-
Runs the tests for just the server or browser
+`npm run test:quick`
+Runs both server and browser tests, but skips linting
-
npm run test:dev
-
- Initializes an environment for debugging the browser tests. Includes an dedicated instance of the kibana server for building the test bundle, and a karma server. When running this task the build is optimized for the first time and then a karma-owned instance of the browser is opened. Click the "debug" button to open a new tab that executes the unit tests.
-
-
-
-
+`npm run test:server`
+Run only the server tests
-### Functional UI Testing
+`npm run test:browser`
+Run only the browser tests. Coverage reports are available for browser tests by running `npm run test:coverage`. You can find the results under the `coverage/` directory that will be created upon completion.
-#### Handy references
+`npm run test:dev`
+Initializes an environment for debugging the browser tests. Includes a dedicated instance of the kibana server for building the test bundle, and a karma server. When running this task the build is optimized for the first time and then a karma-owned instance of the browser is opened. Click the "debug" button to open a new tab that executes the unit tests.
+![Browser test debugging](http://i.imgur.com/DwHxgfq.png)
-- https://theintern.github.io/
-- https://theintern.github.io/leadfoot/Element.html
+`npm run mocha [test file or dir]` or `npm run mocha:debug [test file or dir]`
+Run a one off test with the local project version of mocha, babel compilation, and optional debugging. Great
+for development and fixing individual tests.
-#### Running tests using npm task:
+#### Unit testing plugins
+This should work super if you're using the [Kibana plugin generator](https://github.com/elastic/generator-kibana-plugin). If you're not using the generator, well, you're on your own. We suggest you look at how the generator works.
-*The Selenium server that is started currently only runs the tests in Firefox*
+`npm run test:dev -- --kbnServer.testsBundle.pluginId=some_special_plugin --kbnServer.plugin-path=../some_special_plugin`
+Run the tests for just your particular plugin. Assuming your plugin lives outside of the `installedPlugins` directory, which it should.
-To runt the functional UI tests, execute the following command:
+#### Running browser automation tests:
-`npm run test:ui`
+*The Selenium server that is started currently only runs the tests in a recent version of Firefox.*
+*You can use the `PATH` environment variable to specify which version of Firefox to use.*
-The task above takes a little time to start the servers. You can also start the servers and leave them running, and then run the tests separately:
+The following will start Kibana, Elasticsearch and Selenium for you. To run the functional UI tests use the following commands
-`npm run test:ui:server` will start the server required to run the selenium tests, leave this open
+`npm run test:ui`
+Run the functional UI tests one time and exit. This is used by the CI systems and is great for quickly checking that things pass. It is essentially a combination of the next two tasks.
+
+`npm run test:ui:server`
+Start the server required for the `test:ui:runner` tasks. Once the server is started `test:ui:runner` can be run multiple times without waiting for the server to start.
-`npm run test:ui:runner` will run the frontend tests and close when complete
+`npm run test:ui:runner`
+Execute the front-end selenium tests. This requires the server started by the `test:ui:server` task.
-#### Running tests locally with your existing (and already running) ElasticSearch, Kibana, and Selenium Server:
+##### If you already have ElasticSearch, Kibana, and Selenium Server running:
-Set your es and kibana ports in `test/intern.js` to 9220 and 5620, respecitively. You can configure your Selenium server to run the tests on Chrome,IE, or other browsers here.
+Set your es and kibana ports in `test/intern.js` to 9220 and 5620, respectively. You can configure your Selenium server to run the tests on Chrome, IE, or other browsers here.
Once you've got the services running, execute the following:
-`npm run test:ui:runner`
+```sh
+npm run test:ui:runner
+```
-#### General notes:
+#### Browser automation notes:
- Using Page Objects pattern (https://theintern.github.io/intern/#writing-functional-test)
-- At least the initial tests for the Settings, Discover, and Visualize tabs all depend on a very specific set of logstash-type data (generated with makelogs). Since that is a static set of data, all the Discover and Visualize tests use a specific Absolute time range. This gaurantees the same results each run.
+- At least the initial tests for the Settings, Discover, and Visualize tabs all depend on a very specific set of logstash-type data (generated with makelogs). Since that is a static set of data, all the Discover and Visualize tests use a specific Absolute time range. This guarantees the same results each run.
- These tests have been developed and tested with Chrome and Firefox browser. In theory, they should work on all browsers (that's the benefit of Intern using Leadfoot).
- These tests should also work with an external testing service like https://saucelabs.com/ or https://www.browserstack.com/ but that has not been tested.
+- https://theintern.github.io/
+- https://theintern.github.io/leadfoot/module-leadfoot_Element.html
+
+#### Building OS packages
+
+Packages are built using fpm, pleaserun, dpkg, and rpm. fpm and pleaserun can be installed using gem. Package building has only been tested on Linux and is not supported on any other platform.
+```sh
+apt-get install ruby-dev rpm
+gem install fpm -v 1.5.0 # required by pleaserun 0.0.16
+gem install pleaserun -v 0.0.16 # higher versions fail at the moment
+npm run build -- --skip-archives
+```
+
+To specify a package to build you can add `rpm` or `deb` as an argument.
+```sh
+npm run build -- --rpm
+```
+
+Distributable packages can be found in `target/` after the build completes.
-### Submit a pull request
+## Submitting a pull request
Push your local changes to your forked copy of the repository and submit a pull request. In the pull request, describe what your changes do and mention the number of the issue where discussion has taken place, eg “Closes #123″.
@@ -150,8 +209,8 @@ Then sit back and wait. There will probably be discussion about the pull request
After a pull is submitted, it needs to get to review. If you have commit permission on the Kibana repo you will probably perform these steps while submitting your pull request. If not, a member of the elastic organization will do them for you, though you can help by suggesting a reviewer for your changes if you've interacted with someone while working on the issue.
-1. Assign the `review` tag. This signals to the team that someone needs to give this attention.
-1. Assign version tags. If the pull is related to an existing issue (and it should be!), that issue probably has a version tag (eg `4.0.1`) on it. Assign the same version tag to your pull. You may end up with 2 or more version tags if the changes requires backporting
+1. Assign the `review` label. This signals to the team that someone needs to give this attention.
+1. Do **not** assign a version label. Someone from Elastic staff will assign a version label, if necessary, when your pull request is ready to be merged.
1. Find someone to review your pull. Don't just pick any yahoo, pick the right person. The right person might be the original reporter of the issue, but it might also be the person most familiar with the code you've changed. If neither of those things apply, or your change is small in scope, try to find someone on the Kibana team without a ton of existing reviews on their plate. As a rule, most pulls will require 2 reviewers, but the first reviewer will pick the 2nd.
### Review engaged
@@ -160,9 +219,10 @@ So, you've been assigned a pull to review. What's that look like?
Remember, someone is blocked by a pull awaiting review, make it count. Be thorough, the more action items you catch in the first review, the less back and forth will be required, and the better chance the pull has of being successful. Don't you like success?
-1. **Understand the issue** that is being fixed, or the feature being added. Check the description on the pull, and check out the related issue. If you don't understand something, ask the person the submitter for clarification.
+1. **Understand the issue** that is being fixed, or the feature being added. Check the description on the pull, and check out the related issue. If you don't understand something, ask the submitter for clarification.
1. **Reproduce the bug** (or the lack of feature I guess?) in the destination branch, usually `master`. The referenced issue will help you here. If you're unable to reproduce the issue, contact the issue submitter for clarification
1. **Check out the pull** and test it. Is the issue fixed? Does it have nasty side effects? Try to create suspect inputs. If it operates on the value of a field try things like: strings (including an empty string), null, numbers, dates. Try to think of edge cases that might break the code.
+1. **Merge the target branch**. It is possible that tests or the linter have been updated in the target branch since the pull was submitted. Merging the pull could cause CI to start failing.
1. **Read the code**. Understanding the changes will help you find additional things to test. Contact the submitter if you don't understand something.
1. **Go line-by-line**. Are there [style guide](https://github.com/elastic/kibana/blob/master/STYLEGUIDE.md) violations? Strangely named variables? Magic numbers? Do the abstractions make sense to you? Are things arranged in a testable way?
1. **Speaking of tests** Are they there? If a new function was added does it have tests? Do the tests, well, TEST anything? Do they just run the function or do they properly check the output?
diff --git a/FAQ.md b/FAQ.md
index fd45eddc12aad..c4710cab39d80 100644
--- a/FAQ.md
+++ b/FAQ.md
@@ -1,5 +1,15 @@
-**Kibana 3 Migration FAQ:**
+# Frequently asked questions
+**Q:** I'm getting `bin/node/bin/node: not found` but I can see the node binary in the package?
+**A:** Kibana 4 packages are architecture specific. Ensure you are using the correct package for your architecture.
+
+**Q:** Where do I go for support?
+**A:** Please join us at [discuss.elastic.co](https://discuss.elastic.co) with questions. Your problem might be a bug, but it might just be a misunderstanding, or feature we could improve. We're also available on Freenode in #kibana
+
+**Q:** Ok, we talked about it and it's definitely a bug
+**A:** Doh, ok, let's get that fixed. File an issue on [github.com/elastic/kibana](https://github.com/elastic/kibana). I'd recommend reading the beginning of the CONTRIBUTING.md, just so you know how we'll handle the issue.
+
+### Kibana 3 Migration
**Q:** Where is feature X that I loved from Kibana 3?
**A:** It might be coming! We’ve published our immediate roadmap as tickets. Check out the beta milestones on GitHub to see if the feature you’re missing is coming soon.
@@ -12,6 +22,3 @@
**Q:** What happened to templated/scripted dashboards?
**A:** Check out the URL. The state of each app is stored there, including any filters, queries or columns. This should be a lot easier than constructing scripted dashboards. The encoding of the URL is RISON.
-**Q:** I'm getting `bin/node/bin/node: not found` but I can see the node binary in the package?
-**A:** Kibana 4 packages are architecture specific. Ensure you are using the correct package for your architecture.
-
diff --git a/Gruntfile.js b/Gruntfile.js
index a7867384a6faf..d6fa1c8a13819 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -1,18 +1,19 @@
-require('babel/register')(require('./src/optimize/babelOptions').node);
+const camelCase = require('lodash').camelCase;
+require('babel/register')(require('./src/optimize/babel_options').node);
module.exports = function (grunt) {
// set the config once before calling load-grunt-config
// and once during so that we have access to it via
// grunt.config.get() within the config files
- var config = {
+ const config = {
pkg: grunt.file.readJSON('package.json'),
root: __dirname,
src: __dirname + '/src',
- build: __dirname + '/build', // temporary build directory
+ buildDir: __dirname + '/build', // temporary build directory
plugins: __dirname + '/src/plugins',
server: __dirname + '/src/server',
target: __dirname + '/target', // location of the compressed build targets
- testUtilsDir: __dirname + '/src/testUtils',
+ testUtilsDir: __dirname + '/src/test_utils',
configFile: __dirname + '/src/config/kibana.yml',
karmaBrowser: (function () {
@@ -61,13 +62,17 @@ module.exports = function (grunt) {
'postcss-unique-selectors': '1.0.0',
'postcss-minify-selectors': '1.4.6',
'postcss-single-charset': '0.3.0',
- 'regenerator': '0.8.36'
+ 'regenerator': '0.8.36',
+ 'readable-stream': '2.1.0'
}
};
grunt.config.merge(config);
- config.userScriptsDir = __dirname + '/build/userScripts';
+ // must run before even services/platforms
+ grunt.config.set('build', require('./tasks/config/build')(grunt));
+
+ config.packageScriptsDir = __dirname + '/tasks/build/package_scripts';
// ensure that these run first, other configs need them
config.services = require('./tasks/config/services')(grunt);
config.platforms = require('./tasks/config/platforms')(grunt);
@@ -87,4 +92,5 @@ module.exports = function (grunt) {
// load task definitions
grunt.task.loadTasks('tasks');
grunt.task.loadTasks('tasks/build');
+ grunt.task.loadTasks('tasks/rebuild');
};
diff --git a/LICENSE.md b/LICENSE.md
index 484bbafe7b7ad..d049908847b87 100644
--- a/LICENSE.md
+++ b/LICENSE.md
@@ -1,4 +1,4 @@
-Copyright 2012–2014 Elasticsearch BV
+Copyright 2012–2015 Elasticsearch BV
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
diff --git a/README.md b/README.md
index ac077c92a2eb9..aba972f854b3d 100644
--- a/README.md
+++ b/README.md
@@ -1,20 +1,26 @@
-# Kibana 4.4.0-snapshot
-
-[![Build Status](https://travis-ci.org/elastic/kibana.svg?branch=master)](https://travis-ci.org/elastic/kibana?branch=master)
+# Kibana 5.0.0
Kibana is an open source ([Apache Licensed](https://github.com/elastic/kibana/blob/master/LICENSE.md)), browser based analytics and search dashboard for Elasticsearch. Kibana is a snap to setup and start using. Kibana strives to be easy to get started with, while also being flexible and powerful, just like Elasticsearch.
## Requirements
-- Elasticsearch version 2.1.0 or later
+- Elasticsearch master
- Kibana binary package
## Installation
* Download: [http://www.elastic.co/downloads/kibana](http://www.elastic.co/downloads/kibana)
+* Extract the files
* Run `bin/kibana` on unix, or `bin\kibana.bat` on Windows.
* Visit [http://localhost:5601](http://localhost:5601)
+
+## Upgrade from previous version
+
+* Move any custom configurations in your old kibana.yml to your new one
+* Reinstall plugins
+* Start or restart Kibana
+
## Quick Start
You're up and running! Fantastic! Kibana is now running on port 5601, so point your browser at http://YOURDOMAIN.com:5601.
@@ -35,9 +41,9 @@ Visit [Elastic.co](http://www.elastic.co/guide/en/kibana/current/index.html) for
For the daring, snapshot builds are available. These builds are created after each commit to the master branch, and therefore are not something you should run in production.
-| platform | | |
-| --- | --- | --- |
-| OSX | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-darwin-x64.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-darwin-x64.zip) |
-| Linux x64 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-linux-x64.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-linux-x64.zip) |
-| Linux x86 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-linux-x86.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-linux-x86.zip) |
-| Windows | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-windows.tar.gz) | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-4.4.0-snapshot-windows.zip) |
+| platform | |
+| --- | --- |
+| OSX | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-5.0.0-snapshot-darwin-x64.tar.gz) |
+| Linux x64 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-5.0.0-snapshot-linux-x64.tar.gz) [deb](https://download.elastic.co/kibana/kibana-snapshot/kibana_5.0.0-snapshot_amd64.deb) [rpm](https://download.elastic.co/kibana/kibana-snapshot/kibana-5.0.0_snapshot-1.x86_64.rpm) |
+| Linux x86 | [tar](http://download.elastic.co/kibana/kibana-snapshot/kibana-5.0.0-snapshot-linux-x86.tar.gz) [deb](https://download.elastic.co/kibana/kibana-snapshot/kibana_5.0.0-snapshot_i386.deb) [rpm](https://download.elastic.co/kibana/kibana-snapshot/kibana-5.0.0_snapshot-1.i386.rpm) |
+| Windows | [zip](http://download.elastic.co/kibana/kibana-snapshot/kibana-5.0.0-snapshot-windows.zip) |
diff --git a/STYLEGUIDE.md b/STYLEGUIDE.md
index 010de8e419b5c..03bbc7c68c725 100644
--- a/STYLEGUIDE.md
+++ b/STYLEGUIDE.md
@@ -1,767 +1,9 @@
This is a collection of style guides for Kibana projects. The include guides for the following:
-- [JavaScript](#javascript-style-guide)
-- [Kibana Project](#kibana-style-guide)
-- [Html](#html-style-guide)
-
-# JavaScript Style Guide
-
-## 2 Spaces for indention
-
-Use 2 spaces for indenting your code and swear an oath to never mix tabs and
-spaces - a special kind of hell is awaiting you otherwise.
-
-## Newlines
-
-Use UNIX-style newlines (`\n`), and a newline character as the last character
-of a file. Windows-style newlines (`\r\n`) are forbidden inside any repository.
-
-## No trailing whitespace
-
-Just like you brush your teeth after every meal, you clean up any trailing
-whitespace in your JS files before committing. Otherwise the rotten smell of
-careless neglect will eventually drive away contributors and/or co-workers.
-
-## Use Semicolons
-
-According to [scientific research][hnsemicolons], the usage of semicolons is
-a core value of our community. Consider the points of [the opposition][], but
-be a traditionalist when it comes to abusing error correction mechanisms for
-cheap syntactic pleasures.
-
-[the opposition]: http://blog.izs.me/post/2353458699/an-open-letter-to-javascript-leaders-regarding
-[hnsemicolons]: http://news.ycombinator.com/item?id=1547647
-
-## 120 characters per line
-
-Try to limit your lines to 80 characters. If it feels right, you can go up to 120 characters.
-
-## Use single quotes
-
-Use single quotes, unless you are writing JSON.
-
-*Right:*
-
-```js
-var foo = 'bar';
-```
-
-*Wrong:*
-
-```js
-var foo = "bar";
-```
-
-## Opening braces go on the same line
-
-Your opening braces go on the same line as the statement.
-
-*Right:*
-
-```js
-if (true) {
- console.log('winning');
-}
-```
-
-*Wrong:*
-
-```js
-if (true)
-{
- console.log('losing');
-}
-```
-
-Also, notice the use of whitespace before and after the condition statement.
-
-## Always use braces for multi-line code
-
-*Right:*
-
-```js
-if (err) {
- return cb(err);
-}
-```
-
-*Wrong:*
-
-```js
-if (err)
- return cb(err);
-```
-
-## Prefer multi-line conditionals
-
-But single-line conditionals are allowed for short lines
-
-*Preferred:*
-
-```js
-if (err) {
- return cb(err);
-}
-```
-
-*Allowed:*
-
-```js
-if (err) return cb(err);
-```
-
-## Declare one variable per var statement
-
-Declare one variable per var statement, it makes it easier to re-order the
-lines. However, ignore [Crockford][crockfordconvention] when it comes to
-declaring variables deeper inside a function, just put the declarations wherever
-they make sense.
-
-*Right:*
-
-```js
-var keys = ['foo', 'bar'];
-var values = [23, 42];
-
-var object = {};
-while (keys.length) {
- var key = keys.pop();
- object[key] = values.pop();
-}
-```
-
-*Wrong:*
-
-```js
-var keys = ['foo', 'bar'],
- values = [23, 42],
- object = {},
- key;
-
-while (keys.length) {
- key = keys.pop();
- object[key] = values.pop();
-}
-```
-
-[crockfordconvention]: http://javascript.crockford.com/code.html
-
-## Use lowerCamelCase for variables, properties and function names
-
-Variables, properties and function names should use `lowerCamelCase`. They
-should also be descriptive. Single character variables and uncommon
-abbreviations should generally be avoided.
-
-*Right:*
-
-```js
-var adminUser = db.query('SELECT * FROM users ...');
-```
-
-*Wrong:*
-
-```js
-var admin_user = db.query('SELECT * FROM users ...');
-```
-
-## Use UpperCamelCase for class names
-
-Class names should be capitalized using `UpperCamelCase`.
-
-*Right:*
-
-```js
-function BankAccount() {
-}
-```
-
-*Wrong:*
-
-```js
-function bank_Account() {
-}
-```
-
-## Use UPPERCASE for Constants
-
-Constants should be declared as regular variables or static class properties,
-using all uppercase letters.
-
-Node.js / V8 actually supports mozilla's [const][const] extension, but
-unfortunately that cannot be applied to class members, nor is it part of any
-ECMA standard.
-
-*Right:*
-
-```js
-var SECOND = 1 * 1000;
-
-function File() {
-}
-File.FULL_PERMISSIONS = 0777;
-```
-
-*Wrong:*
-
-```js
-const SECOND = 1 * 1000;
-
-function File() {
-}
-File.fullPermissions = 0777;
-```
-
-[const]: https://developer.mozilla.org/en/JavaScript/Reference/Statements/const
-
-## Magic numbers
-
-These are numbers (or other values) simply used in line in your code. **Do not use these**, give them a variable name so they can be understood and changed easily.
-
-*Right:*
-
-```js
-var minWidth = 300;
-
-if (width < minWidth) {
- ...
-}
-```
-
-*Wrong:*
-
-```js
-if (width < 300) {
- ...
-}
-```
-
-## Global definitions
-
-Don't do this. Everything should be wrapped in a module that can be depended on by other modules. Even things as simple as a single value should be a module.
-
-## Function definitions
-
-Prefer the use of function declarations over function expressions. Function expressions are allowed, but should usually be avoided.
-
-Also, keep function definitions above other code instead of relying on function hoisting.
-
-*Preferred:*
-
-```js
-function myFunc() {
- ...
-}
-```
-
-*Allowed:*
-
-```js
-var myFunc = function () {
- ...
-};
-```
-
-## Object / Array creation
-
-Use trailing commas and put *short* declarations on a single line. Only quote
-keys when your interpreter complains:
-
-*Right:*
-
-```js
-var a = ['hello', 'world'];
-var b = {
- good: 'code',
- 'is generally': 'pretty'
-};
-```
-
-*Wrong:*
-
-```js
-var a = [
- 'hello', 'world'
-];
-var b = {"good": 'code'
- , is generally: 'pretty'
- };
-```
-
-## Object / Array iterations, transformations and operations
-
-Use native ES5 methods to iterate and transform arrays and objects where possible. Do not use `for` and `while` loops.
-
-Use descriptive variable names in the closures.
-
-Use a utility library as needed and where it will make code more comprehensible.
-
-*Right:*
-
-```js
-var userNames = users.map(function (user) {
- return user.name;
-});
-
-// examples where lodash makes the code more readable
-var userNames = _.pluck(users, 'name');
-```
-
-*Wrong:*
-
-```js
-var userNames = [];
-for (var i = 0; i < users.length; i++) {
- userNames.push(users[i].name);
-}
-```
-
-## Use the === operator
-
-Programming is not about remembering [stupid rules][comparisonoperators]. Use
-the triple equality operator as it will work just as expected.
-
-*Right:*
-
-```js
-var a = 0;
-if (a !== '') {
- console.log('winning');
-}
-
-```
-
-*Wrong:*
-
-```js
-var a = 0;
-if (a == '') {
- console.log('losing');
-}
-```
-
-[comparisonoperators]: https://developer.mozilla.org/en/JavaScript/Reference/Operators/Comparison_Operators
-
-## Only use ternary operators for small, simple code
-
-And **never** use multiple ternaries together
-
-*Right:*
-
-```js
-var foo = (a === b) ? 1 : 2;
-```
-
-*Wrong:*
-
-```js
-var foo = (a === b) ? 1 : (a === c) ? 2 : 3;
-```
-
-## Do not extend built-in prototypes
-
-Do not extend the prototype of native JavaScript objects. Your future self will
-be forever grateful.
-
-*Right:*
-
-```js
-var a = [];
-if (!a.length) {
- console.log('winning');
-}
-```
-
-*Wrong:*
-
-```js
-Array.prototype.empty = function() {
- return !this.length;
-}
-
-var a = [];
-if (a.empty()) {
- console.log('losing');
-}
-```
-
-## Use descriptive conditions
-
-Any non-trivial conditions should be assigned to a descriptively named variables, broken into
-several names variables, or converted to be a function:
-
-*Right:*
-
-```js
-var thing = ...;
-var isShape = thing instanceof Shape;
-var notSquare = !(thing instanceof Square);
-var largerThan10 = isShape && thing.size > 10;
-
-if (isShape && notSquare && largerThan10) {
- console.log('some big polygon');
-}
-```
-
-*Wrong:*
-
-```js
-if (
- thing instanceof Shape
- && !(thing instanceof Square)
- && thing.size > 10
-) {
- console.log('bigger than ten?? Woah!');
-}
-```
-
-## Name regular expressions
-
-*Right:*
-
-```js
-var validPasswordRE = /^(?=.*\d).{4,}$/;
-
-if (password.length >= 4 && validPasswordRE.test(password)) {
- console.log('password is valid');
-}
-```
-
-*Wrong:*
-
-```js
-if (password.length >= 4 && /^(?=.*\d).{4,}$/.test(password)) {
- console.log('losing');
-}
-```
-
-## Write small functions
-
-Keep your functions short. A good function fits on a slide that the people in
-the last row of a big room can comfortably read. So don't count on them having
-perfect vision and limit yourself to ~15 lines of code per function.
-
-## Return early from functions
-
-To avoid deep nesting of if-statements, always return a function's value as early
-as possible.
-
-*Right:*
-
-```js
-function isPercentage(val) {
- if (val < 0) return false;
- if (val > 100) return false;
-
- return true;
-}
-```
-
-*Wrong:*
-
-```js
-function isPercentage(val) {
- if (val >= 0) {
- if (val < 100) {
- return true;
- } else {
- return false;
- }
- } else {
- return false;
- }
-}
-```
-
-Or for this particular example it may also be fine to shorten things even
-further:
-
-```js
-function isPercentage(val) {
- var isInRange = (val >= 0 && val <= 100);
- return isInRange;
-}
-```
-
-## Chaining operations
-
-When using a chaining syntax (jquery or promises, for example), do not indent the subsequent chained operations, unless there is a logical grouping in them.
-
-Also, if the chain is long, each method should be on a new line.
-
-*Right:*
-
-```js
-$('.someClass')
-.addClass('another-class')
-.append(someElement)
-```
-
-```js
-d3.selectAll('g.bar')
-.enter()
- .append('thing')
- .data(anything)
- .exit()
-.each(function() ... )
-```
-
-```js
-$http.get('/info')
-.then(({ data }) => this.transfromInfo(data))
-.then((transformed) => $http.post('/new-info', transformed))
-.then(({ data }) => console.log(data));
-```
-
-*Wrong:*
-
-```js
-$('.someClass')
- .addClass('another-class')
- .append(someElement)
-```
-
-```js
-d3.selectAll('g.bar')
-.enter().append('thing').data(anything).exit()
-.each(function() ... )
-```
-
-```js
-$http.get('/info')
- .then(({ data }) => this.transfromInfo(data))
- .then((transformed) => $http.post('/new-info', transformed))
- .then(({ data }) => console.log(data));
-```
-
-## Name your closures
-
-Feel free to give your closures a descriptive name. It shows that you care about them, and
-will produce better stack traces, heap and cpu profiles.
-
-*Right:*
-
-```js
-req.on('end', function onEnd() {
- console.log('winning');
-});
-```
-
-*Wrong:*
-
-```js
-req.on('end', function() {
- console.log('losing');
-});
-```
-
-## No nested closures
-
-Use closures, but don't nest them. Otherwise your code will become a mess.
-
-*Right:*
-
-```js
-setTimeout(function() {
- client.connect(afterConnect);
-}, 1000);
-
-function afterConnect() {
- console.log('winning');
-}
-```
-
-*Wrong:*
-
-```js
-setTimeout(function() {
- client.connect(function() {
- console.log('losing');
- });
-}, 1000);
-```
-
-## Use slashes for comments
-
-Use slashes for both single line and multi line comments. Try to write
-comments that explain higher level mechanisms or clarify difficult
-segments of your code. **Don't use comments to restate trivial things**.
-
-***Exception:*** Comment blocks describing a function and it's arguments (docblock) should start with `/**`, contain a single `*` at the begining of each line, and end with `*/`.
-
-*Right:*
-
-```js
-// 'ID_SOMETHING=VALUE' -> ['ID_SOMETHING=VALUE', 'SOMETHING', 'VALUE']
-var matches = item.match(/ID_([^\n]+)=([^\n]+)/));
-
-/**
- * Fetches a user from...
- * @param {string} id - id of the user
- * @return {Promise}
- */
-function loadUser(id) {
- // This function has a nasty side effect where a failure to increment a
- // redis counter used for statistics will cause an exception. This needs
- // to be fixed in a later iteration.
-
- ...
-}
-
-var isSessionValid = (session.expires < Date.now());
-if (isSessionValid) {
- ...
-}
-```
-
-*Wrong:*
-
-```js
-// Execute a regex
-var matches = item.match(/ID_([^\n]+)=([^\n]+)/));
-
-// Usage: loadUser(5, function() { ... })
-function loadUser(id, cb) {
- // ...
-}
-
-// Check if the session is valid
-var isSessionValid = (session.expires < Date.now());
-// If the session is valid
-if (isSessionValid) {
- // ...
-}
-```
-
-## Do not comment out code
-
-We use a version management system. If a line of code is no longer needed, remove it, don't simply comment it out.
-
-## Classes/Constructors and Inheritance
-
-While JavaScript it is not always considered an object-oriented language, it does have the building blocks for writing object oriented code. Of course, as with all things JavaScript, there are many ways this can be accomplished. Generally, we try to err on the side of readability.
-
-### Capitalized function definition as Constructors
-
-When Defining a Class/Constructor, use the function definition syntax.
-
-*Right:*
-```js
-function ClassName() {
-
-}
-```
-
-*Wrong:*
-```js
-var ClassName = function () {};
-```
-
-### Inhertiance should be done with a utility
-
-While you can do it with pure JS, a utility will remove a lot of boilerplate, and be more readable and functional.
-
-*Right:*
-
-```js
-// uses a lodash inherits mixin
-// inheritance is defined first - it's easier to read and the function will be hoisted
-_.class(Square).inherits(Shape);
-
-function Square(width, height) {
- Square.Super.call(this);
-}
-```
-
-*Wrong:*
-
-```js
-function Square(width, height) {
- this.width = width;
- this.height = height;
-}
-
-Square.prototype = Object.create(Shape);
-```
-
-### Keep Constructors Small
-
-It is often the case that there are properties that can't be defined on the prototype, or work that needs to be done to completely create an object (like call it's Super class). This is all that should be done within constructors.
-
-Try to follow the [Write small functions](#write-small-functions) rule here too.
-
-### Use the prototype
-
-If a method/property *can* go on the prototype, it probably should.
-
-```js
-function Square() {
- ...
-}
-
-/**
- * method does stuff
- * @return {undefined}
- */
-Square.prototype.method = function () {
- ...
-}
-```
-
-### Handling scope and aliasing `this`
-
-When creating a prototyped class, each method should almost always start with:
-
-`var self = this;`
-
-With the exception of very short methods (roughly 3 lines or less), `self` should always be used in place of `this`.
-
-Avoid the use of `bind`
-
-*Right:*
-
-```js
-Square.prototype.doFancyThings = function () {
- var self = this;
-
- somePromiseUtil()
- .then(function (result) {
- self.prop = result.prop;
- });
-}
-```
-
-*Wrong:*
-
-```js
-Square.prototype.doFancyThings = function () {
- somePromiseUtil()
- .then(function (result) {
- this.prop = result.prop;
- }).bind(this);
-}
-```
-
-*Allowed:*
-
-```js
-Square.prototype.area = function () {
- return this.width * this.height;
-}
-```
-
-## Object.freeze, Object.preventExtensions, Object.seal, with, eval
-
-Crazy shit that you will probably never need. Stay away from it.
-
-## Getters and Setters
-
-Feel free to use getters that are free from [side effects][sideeffect], like
-providing a length property for a collection class.
-
-Do not use setters, they cause more problems for people who try to use your
-software than they can solve.
-
-[sideeffect]: http://en.wikipedia.org/wiki/Side_effect_(computer_science)
+- [JavaScript](style_guides/js_style_guide.md)
+- [CSS](style_guides/css_style_guide.md)
+- [HTML](style_guides/html_style_guide.md)
+- [API](style_guides/api_style_guide.md)
# Kibana Style Guide
@@ -775,7 +17,7 @@ Several already exist, and can be found in `src/kibana/utils/_mixins.js`
## Filenames
-All filenames should use `snake_case` and *can* start with an underscore if the module is not intended to be used outside of it's containing module.
+All filenames should use `snake_case` and *can* start with an underscore if the module is not intended to be used outside of its containing module.
*Right:*
- `src/kibana/index_patterns/index_pattern.js`
@@ -825,7 +67,7 @@ Angular modules are defined using a custom require module named `ui/modules`. It
var app = require('ui/modules').get('app/namespace');
```
-`app` above is a reference to an Angular module, and can be used to define controllers, providers and anything else used in Angular.
+`app` above is a reference to an Angular module, and can be used to define controllers, providers and anything else used in Angular. While you can use this module to create/get any module with ui/modules, we generally use the "kibana" module for everything.
### Private modules
@@ -838,6 +80,8 @@ app.controller('myController', function($scope, otherDeps, Private) {
});
```
+*Use `Private` modules for everything except directives, filters, and controllers.*
+
### Promises
A more robust version of Angular's `$q` service is available as `Promise`. It can be used in the same way as `$q`, but it comes packaged with several utility methods that provide many of the same useful utilities as Bluebird.
@@ -856,7 +100,7 @@ app.service('CustomService', function(Promise, otherDeps) {
### Routes
-Angular routes are defined using a custom require modules named `routes` that remove much of the required boilerplate.
+Angular routes are defined using a custom require module named `routes` that removes much of the required boilerplate.
```js
require('ui/routes')
@@ -864,59 +108,3 @@ require('ui/routes')
// angular route code goes here
});
```
-
-# Html Style Guide
-
-## Multiple attribute values
-
-When a node has multiple attributes that would cause it to exceed the line character limit, each attribute including the first should be on its own line with a single indent. Also, when a node that is styled in this way has child nodes, there should be a blank line between the openening parent tag and the first child tag.
-
-```
-
-
-
-
- ...
-
-```
-
-# Api Style Guide
-
-## Paths
-
-API routes must start with the `/api/` path segment, and should be followed by the plugin id if applicable:
-
-*Right:* `/api/marvel/v1/nodes`
-*Wrong:* `/marvel/api/v1/nodes`
-
-## Versions
-
-Kibana won't be supporting multiple API versions, so API's should not define a version.
-
-*Right:* `/api/kibana/index_patterns`
-*Wrong:* `/api/kibana/v1/index_patterns`
-
-## snake_case
-
-Kibana uses `snake_case` for the entire API, just like Elasticsearch. All urls, paths, query string parameters, values, and bodies should be `snake_case` formatted.
-
-*Right:*
-```
-POST /api/kibana/index_patterns
-{
- "id": "...",
- "time_field_name": "...",
- "fields": [
- ...
- ]
-}
-```
-
-# Attribution
-
-This JavaScript guide forked from the [node style guide](https://github.com/felixge/node-style-guide) created by [Felix Geisendörfer](http://felixge.de/) and is
-licensed under the [CC BY-SA 3.0](http://creativecommons.org/licenses/by-sa/3.0/)
-license.
diff --git a/bin/kibana b/bin/kibana
index 5f2a124b801e8..1606ad2813025 100755
--- a/bin/kibana
+++ b/bin/kibana
@@ -21,5 +21,4 @@ if [ ! -x "$NODE" ]; then
exit 1
fi
-exec "${NODE}" "${DIR}/src/cli" ${@}
-
+exec "${NODE}" $NODE_OPTIONS "${DIR}/src/cli" ${@}
diff --git a/bin/kibana-plugin b/bin/kibana-plugin
new file mode 100755
index 0000000000000..7e8481fa71a2e
--- /dev/null
+++ b/bin/kibana-plugin
@@ -0,0 +1,24 @@
+#!/bin/sh
+SCRIPT=$0
+
+# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
+while [ -h "$SCRIPT" ] ; do
+ ls=$(ls -ld "$SCRIPT")
+ # Drop everything prior to ->
+ link=$(expr "$ls" : '.*-> \(.*\)$')
+ if expr "$link" : '/.*' > /dev/null; then
+ SCRIPT="$link"
+ else
+ SCRIPT=$(dirname "$SCRIPT")/"$link"
+ fi
+done
+
+DIR="$(dirname "${SCRIPT}")/.."
+NODE="${DIR}/node/bin/node"
+test -x "$NODE" || NODE=$(which node)
+if [ ! -x "$NODE" ]; then
+ echo "unable to find usable node.js executable."
+ exit 1
+fi
+
+exec "${NODE}" $NODE_OPTIONS "${DIR}/src/cli_plugin" ${@}
diff --git a/bin/kibana-plugin.bat b/bin/kibana-plugin.bat
new file mode 100644
index 0000000000000..9d8bdc4778129
--- /dev/null
+++ b/bin/kibana-plugin.bat
@@ -0,0 +1,29 @@
+@echo off
+
+SETLOCAL
+
+set SCRIPT_DIR=%~dp0
+for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI
+
+set NODE=%DIR%\node\node.exe
+
+WHERE /Q node
+IF %ERRORLEVEL% EQU 0 (
+ for /f "delims=" %%i in ('WHERE node') do set SYS_NODE=%%i
+)
+
+If Not Exist "%NODE%" (
+ IF Exist "%SYS_NODE%" (
+ set "NODE=%SYS_NODE%"
+ ) else (
+ Echo unable to find usable node.js executable.
+ Exit /B 1
+ )
+)
+
+TITLE Kibana Server
+"%NODE%" %NODE_OPTIONS% "%DIR%\src\cli_plugin" %*
+
+:finally
+
+ENDLOCAL
diff --git a/bin/kibana.bat b/bin/kibana.bat
index 53a0df663b9e6..2c39d080bf0ad 100644
--- a/bin/kibana.bat
+++ b/bin/kibana.bat
@@ -6,11 +6,15 @@ set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI
set NODE=%DIR%\node\node.exe
-for /f "delims=" %%i in ('WHERE node') do set SYS_NODE=%%i
+
+WHERE /Q node
+IF %ERRORLEVEL% EQU 0 (
+ for /f "delims=" %%i in ('WHERE node') do set SYS_NODE=%%i
+)
If Not Exist "%NODE%" (
IF Exist "%SYS_NODE%" (
- set NODE=%SYS_NODE%
+ set "NODE=%SYS_NODE%"
) else (
Echo unable to find usable node.js executable.
Exit /B 1
@@ -18,7 +22,7 @@ If Not Exist "%NODE%" (
)
TITLE Kibana Server
-"%NODE%" "%DIR%\src\cli" %*
+"%NODE%" %NODE_OPTIONS% "%DIR%\src\cli" %*
:finally
diff --git a/config/kibana.yml b/config/kibana.yml
index 9f50057cb2f35..2351c3eb70d95 100644
--- a/config/kibana.yml
+++ b/config/kibana.yml
@@ -1,80 +1,92 @@
-# Kibana is served by a back end server. This controls which port to use.
+# Kibana is served by a back end server. This setting specifies the port to use.
# server.port: 5601
-# The host to bind the server to.
+# This setting specifies the IP address of the back end server.
# server.host: "0.0.0.0"
-# A value to use as a XSRF token. This token is sent back to the server on each request
-# and required if you want to execute requests from other clients (like curl).
-# server.xsrf.token: ""
-
-# If you are running kibana behind a proxy, and want to mount it at a path,
-# specify that path here. The basePath can't end in a slash.
+# Enables you to specify a path to mount Kibana at if you are running behind a proxy. This setting
+# cannot end in a slash.
# server.basePath: ""
-# The Elasticsearch instance to use for all your queries.
+# The maximum payload size in bytes for incoming server requests.
+# server.maxPayloadBytes: 1048576
+
+# The Kibana server's name. This is used for display purposes.
+# server.name: "your-hostname"
+
+# The URL of the Elasticsearch instance to use for all your queries.
# elasticsearch.url: "http://localhost:9200"
-# preserve_elasticsearch_host true will send the hostname specified in `elasticsearch`. If you set it to false,
-# then the host you use to connect to *this* Kibana instance will be sent.
+# When this setting’s value is true Kibana uses the hostname specified in the server.host
+# setting. When the value of this setting is false, Kibana uses the hostname of the host
+# that connects to this Kibana instance.
# elasticsearch.preserveHost: true
-# Kibana uses an index in Elasticsearch to store saved searches, visualizations
-# and dashboards. It will create a new index if it doesn't already exist.
+# Kibana uses an index in Elasticsearch to store saved searches, visualizations and
+# dashboards. Kibana creates a new index if the index doesn’t already exist.
# kibana.index: ".kibana"
# The default application to load.
# kibana.defaultAppId: "discover"
-# If your Elasticsearch is protected with basic auth, these are the user credentials
-# used by the Kibana server to perform maintenance on the kibana_index at startup. Your Kibana
-# users will still need to authenticate with Elasticsearch (which is proxied through
-# the Kibana server)
+# If your Elasticsearch is protected with basic authentication, these settings provide
+# the username and password that the Kibana server uses to perform maintenance on the Kibana
+# index at startup. Your Kibana users still need to authenticate with Elasticsearch, which
+# is proxied through the Kibana server.
# elasticsearch.username: "user"
# elasticsearch.password: "pass"
-# SSL for outgoing requests from the Kibana Server to the browser (PEM formatted)
+# Paths to the PEM-format SSL certificate and SSL key files, respectively. These
+# files enable SSL for outgoing requests from the Kibana server to the browser.
# server.ssl.cert: /path/to/your/server.crt
# server.ssl.key: /path/to/your/server.key
-# Optional setting to validate that your Elasticsearch backend uses the same key files (PEM formatted)
+# Optional settings that provide the paths to the PEM-format SSL certificate and key files.
+# These files validate that your Elasticsearch backend uses the same key files.
# elasticsearch.ssl.cert: /path/to/your/client.crt
# elasticsearch.ssl.key: /path/to/your/client.key
-# If you need to provide a CA certificate for your Elasticsearch instance, put
-# the path of the pem file here.
+# Optional setting that enables you to specify a path to the PEM file for the certificate
+# authority for your Elasticsearch instance.
# elasticsearch.ssl.ca: /path/to/your/CA.pem
-# Set to false to have a complete disregard for the validity of the SSL
-# certificate.
+# To disregard the validity of SSL certificates, change this setting’s value to false.
# elasticsearch.ssl.verify: true
-# Time in milliseconds to wait for elasticsearch to respond to pings, defaults to
-# request_timeout setting
+# Time in milliseconds to wait for Elasticsearch to respond to pings. Defaults to the value of
+# the elasticsearch.requestTimeout setting.
# elasticsearch.pingTimeout: 1500
-# Time in milliseconds to wait for responses from the back end or elasticsearch.
-# This must be > 0
-# elasticsearch.requestTimeout: 300000
+# Time in milliseconds to wait for responses from the back end or Elasticsearch. This value
+# must be a positive integer.
+# elasticsearch.requestTimeout: 30000
-# Time in milliseconds for Elasticsearch to wait for responses from shards.
-# Set to 0 to disable.
+# List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side
+# headers, set this value to [] (an empty list).
+# elasticsearch.requestHeadersWhitelist: [ authorization ]
+
+# Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable.
# elasticsearch.shardTimeout: 0
-# Time in milliseconds to wait for Elasticsearch at Kibana startup before retrying
+# Time in milliseconds to wait for Elasticsearch at Kibana startup before retrying.
# elasticsearch.startupTimeout: 5000
-# Set the path to where you would like the process id file to be created.
+# Specifies the path where Kibana creates the process ID file.
# pid.file: /var/run/kibana.pid
-# If you would like to send the log output to a file you can set the path below.
+# Enables you to specify a file where Kibana stores log output.
# logging.dest: stdout
-# Set this to true to suppress all logging output.
+# Set the value of this setting to true to suppress all logging output.
# logging.silent: false
-# Set this to true to suppress all logging output except for error messages.
+# Set the value of this setting to true to suppress all logging output other than error messages.
# logging.quiet: false
-# Set this to true to log all events, including system usage information and all requests.
+# Set the value of this setting to true to log all events, including system usage information
+# and all requests.
# logging.verbose: false
+
+# Set the interval in milliseconds to sample system and process performance
+# metrics. Minimum is 100ms. Defaults to 5000.
+# ops.interval: 5000
diff --git a/docs/advanced-settings.asciidoc b/docs/advanced-settings.asciidoc
index e649d7dc5d8a8..646a6860821c7 100644
--- a/docs/advanced-settings.asciidoc
+++ b/docs/advanced-settings.asciidoc
@@ -1,6 +1,8 @@
[[kibana-settings-reference]]
-WARNING: Modifying the following settings can signficantly affect Kibana's performance and cause problems that are difficult to diagnose. Setting a property's value to a blank field will revert to the default behavior, which may not be compatible with other configuration settings. Deleting a custom setting removes it from Kibana permanently.
+WARNING: Modifying the following settings can significantly affect Kibana's performance and cause problems that are
+difficult to diagnose. Setting a property's value to a blank field will revert to the default behavior, which may not be
+compatible with other configuration settings. Deleting a custom setting removes it from Kibana permanently.
.Kibana Settings Reference
[horizontal]
@@ -8,30 +10,45 @@ WARNING: Modifying the following settings can signficantly affect Kibana's perfo
`sort:options`:: Options for the Elasticsearch https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-sort.html[sort] parameter.
`dateFormat`:: The format to use for displaying pretty-formatted dates.
`dateFormat:tz`:: The timezone that Kibana uses. The default value of `Browser` uses the timezone detected by the browser.
-`dateFormat:scaled`:: These values define the format used to render ordered time-based data. Formatted timestamps must adapt to the interval between measurements. Keys are http://en.wikipedia.org/wiki/ISO_8601#Time_intervals[ISO8601 intervals].
+`dateFormat:scaled`:: These values define the format used to render ordered time-based data. Formatted timestamps must
+adapt to the interval between measurements. Keys are http://en.wikipedia.org/wiki/ISO_8601#Time_intervals[ISO8601 intervals].
+`dateFormat:dow`:: This property defines what day weeks should start on.
`defaultIndex`:: Default is `null`. This property specifies the default index.
-`metaFields`:: An array of fields outside of `_source`. Kibana merges these fields into the document when displaying the document.
+`metaFields`:: An array of fields outside of `_source`. Kibana merges these fields into the document when displaying the
+document.
+`defaultColumns`:: Default is `_source`. Defines the columns that appear by default on the Discover page.
`discover:sampleSize`:: The number of rows to show in the Discover table.
-`doc_table:highlight`:: Highlight results in Discover and Saved Searches Dashboard. Highlighing makes request slow when working on big documents. Set this property to `false` to disable highlighting.
-`courier:maxSegmentCount`:: Kibana splits requests in the Discover app into segments to limit the size of requests sent to the Elasticsearch cluster. This setting constrains the length of the segment list. Long segment lists can significantly increase request processing time.
+`doc_table:highlight`:: Highlight results in Discover and Saved Searches Dashboard. Highlighting makes requests slow when
+working on big documents. Set this property to `false` to disable highlighting.
+`courier:maxSegmentCount`:: Kibana splits requests in the Discover app into segments to limit the size of requests sent to
+the Elasticsearch cluster. This setting constrains the length of the segment list. Long segment lists can significantly
+increase request processing time.
`fields:popularLimit`:: This setting governs how many of the top most popular fields are shown.
`histogram:barTarget`:: When date histograms use the `auto` interval, Kibana attempts to generate this number of bars.
-`histogram:maxBars`:: Date histograms are not generated with more bars than the value of this property, scaling values when necessary.
-`visualization:tileMap:maxPrecision`:: The maximum geoHash precision displayed on tile maps: 7 is high, 10 is very high, 12 is the maximum. http://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohashgrid-aggregation.html#_cell_dimensions_at_the_equator[Explanation of cell dimensions].
+`histogram:maxBars`:: Date histograms are not generated with more bars than the value of this property, scaling values
+when necessary.
+`visualization:tileMap:maxPrecision`:: The maximum geoHash precision displayed on tile maps: 7 is high, 10 is very high,
+12 is the maximum. http://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-geohashgrid-aggregation.html#_cell_dimensions_at_the_equator[Explanation of cell dimensions].
`visualization:tileMap:WMSdefaults`:: Default properties for the WMS map server support in the tile map.
`visualization:colorMapping`:: Maps values to specified colors within visualizations.
`visualization:loadingDelay`:: Time to wait before dimming visualizations during query.
`csv:separator`:: A string that serves as the separator for exported values.
`csv:quoteValues`:: Set this property to `true` to quote exported values.
-`history:limit`:: In fields that have history, such as query inputs, the value of this property limits how many recent values are shown.
-`shortDots:enable`:: Set this property to `true` to shorten long field names in visualizations. For example, instead of `foo.bar.baz`, show `f.b.baz`.
-`truncate:maxHeight`:: This property specifies the maximum height that a cell occupies in a table. A value of 0 disables truncation.
-`indexPattern:fieldMapping:lookBack`:: The value of this property sets the number of recent matching patterns to query the field mapping for index patterns with names that contain timestamps.
-`format:defaultTypeMap`:: A map of the default format name for each field type. Field types that are not explicitly mentioned use "_default_".
+`history:limit`:: In fields that have history, such as query inputs, the value of this property limits how many recent
+values are shown.
+`shortDots:enable`:: Set this property to `true` to shorten long field names in visualizations. For example, instead of
+`foo.bar.baz`, show `f.b.baz`.
+`truncate:maxHeight`:: This property specifies the maximum height that a cell occupies in a table. A value of 0 disables
+truncation.
+`indexPattern:fieldMapping:lookBack`:: The value of this property sets the number of recent matching patterns to query the
+field mapping for index patterns with names that contain timestamps.
+`format:defaultTypeMap`:: A map of the default format name for each field type. Field types that are not explicitly
+mentioned use "_default_".
`format:number:defaultPattern`:: Default numeral format for the "number" format.
`format:bytes:defaultPattern`:: Default numeral format for the "bytes" format.
`format:percent:defaultPattern`:: Default numeral format for the "percent" format.
`format:currency:defaultPattern`:: Default numeral format for the "currency" format.
+`savedObjects:perPage`:: The number of objects shown on each page of the list of saved objects. The default value is 5.
`timepicker:timeDefaults`:: The default time filter selection.
`timepicker:refreshIntervalDefaults`:: The time filter's default refresh interval.
-`dashboard:defaultDarkTheme`:: Set this property to `true` to make new dashboards use the dark theme by default.
\ No newline at end of file
+`dashboard:defaultDarkTheme`:: Set this property to `true` to make new dashboards use the dark theme by default.
diff --git a/docs/apps.asciidoc b/docs/apps.asciidoc
index a17d113147074..747619d14d4e3 100644
--- a/docs/apps.asciidoc
+++ b/docs/apps.asciidoc
@@ -1,5 +1,5 @@
[[kibana-apps]]
-== Kibana Apps added[4.2]
+== Kibana Apps
The Kibana UI serves as a framework that can contain several different applications. You can switch between these
applications by clicking the image:images/app-button.png[App Picker] *App picker* button to display the app bar:
diff --git a/docs/area.asciidoc b/docs/area.asciidoc
index 799f6794ade3e..07baabd2d730b 100644
--- a/docs/area.asciidoc
+++ b/docs/area.asciidoc
@@ -24,19 +24,19 @@ aggregation returns the percentile rankings for the values in the numeric field
from the drop-down, then specify one or more percentile rank values in the *Values* fields. Click the *X* to remove a
values field. Click *+Add* to add a values field.
-You can add an aggregation by clicking the *+ Add Aggregation* button.
+You can add an aggregation by clicking the *+ Add Metrics* button.
include::x-axis-aggs.asciidoc[]
For example, a chart of dates with incident counts can display dates in chronological order, or you can raise the
priority of the incident-reporting aggregation to show the most active dates first. The chronological order might show
a time-dependent pattern in incident count, and sorting by active dates can reveal particular outliers in your data.
+include::color-picker.asciidoc[]
+
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
-*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
-*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:
diff --git a/docs/autorefresh.asciidoc b/docs/autorefresh.asciidoc
index a97e49346f60c..927aff68f0d7b 100644
--- a/docs/autorefresh.asciidoc
+++ b/docs/autorefresh.asciidoc
@@ -6,12 +6,12 @@ When a refresh interval is set, it is displayed to the left of the Time Filter i
To set the refresh interval:
-. Click the *Time Filter* image:images/TimeFilter.jpg[Time Filter] in the upper right corner of the menu bar.
+. Click the *Time Filter* image:images/TimeFilter.jpg[Time Filter].
. Click the *Refresh Interval* tab.
. Choose a refresh interval from the list.
-To automatically refresh the data, click the image:images/autorefresh.png[] *Auto-refresh* button and select an
-autorefresh interval:
+To automatically refresh the data, click the image:images/autorefresh.png[] *Auto-refresh* button when the time picker
+is open and select an autorefresh interval:
image::images/autorefresh-intervals.png[]
diff --git a/docs/color-formatter.asciidoc b/docs/color-formatter.asciidoc
index b03a0d59d07f9..601a4b3d38ff7 100644
--- a/docs/color-formatter.asciidoc
+++ b/docs/color-formatter.asciidoc
@@ -2,6 +2,10 @@ The `Color` field formatter enables you to specify colors with specific ranges o
When you select the `Color` field formatter, Kibana displays the *Range*, *Font Color*, *Background Color*, and *Example* fields.
-Click the *Add Color* button to add a range of values to associate with a particular color. You can click in the *Font Color* and *Background Color* fields to display a color picker. You can also enter a specific hex code value in the field. The effect of your current color choices are displayed in the *Example* field.
+Click the *Add Color* button to add a range of values to associate with a particular color. You can click in the *Font Color* and
+*Background Color* fields to display a color picker. You can also enter a specific hex code value in the field. The effect of your current
+color choices is displayed in the *Example* field.
-image::images/colorformatter.png[]
\ No newline at end of file
+image::images/colorformatter.png[]
+
+//update image
diff --git a/docs/color-picker.asciidoc b/docs/color-picker.asciidoc
new file mode 100644
index 0000000000000..5c3bf129d58f7
--- /dev/null
+++ b/docs/color-picker.asciidoc
@@ -0,0 +1,4 @@
+You can customize the colors of your visualization by clicking the color dot next to each label to display the
+_color picker_.
+
+image::images/color-picker.png[An array of color dots that users can select]
diff --git a/docs/dashboard.asciidoc b/docs/dashboard.asciidoc
index f2c603345e47a..31d957de8887b 100644
--- a/docs/dashboard.asciidoc
+++ b/docs/dashboard.asciidoc
@@ -5,7 +5,7 @@ A Kibana _dashboard_ displays a set of saved visualizations in groups that you c
dashboard to share or reload at a later time.
.Sample dashboard
-image:images/NYCTA-Dashboard.jpg[Example dashboard]
+image:images/tutorial-dashboard.png[Example dashboard]
[float]
[[dashboard-getting-started]]
@@ -22,9 +22,7 @@ The first time you click the *Dashboard* tab, Kibana displays an empty dashboard
image:images/NewDashboard.png[New Dashboard screen]
Build your dashboard by adding visualizations. By default, Kibana dashboards use a light color theme. To use a dark color
-theme instead, click the *Settings* image:images/SettingsButton.jpg[Gear] button and check the *Use dark theme* box.
-
-image:images/darktheme.png[Dark Theme Example]
+theme instead, click the *Options* button and check the *Use dark theme* box.
NOTE: You can change the default theme in the *Advanced* section of the *Settings* tab.
@@ -36,9 +34,9 @@ include::autorefresh.asciidoc[]
[[adding-visualizations-to-a-dashboard]]
==== Adding Visualizations to a Dashboard
-To add a visualization to the dashboard, click the *Add Visualization* image:images/AddVis.png[Plus] button in the
-toolbar panel. Select a saved visualization from the list. You can filter the list of visualizations by typing a filter
-string into the *Visualization Filter* field.
+To add a visualization to the dashboard, click the *Add* button in the toolbar panel. Select a saved visualization
+from the list. You can filter the list of visualizations by typing a filter string into the *Visualization Filter*
+field.
The visualization you select appears in a _container_ on your dashboard.
@@ -60,7 +58,7 @@ when you save a dashboard. To disable this behavior, clear the *Store time with
Click the *Load Saved Dashboard* button to display a list of existing dashboards. The saved dashboard selector includes
a text field to filter by dashboard name and a link to the Object Editor for managing your saved dashboards. You can
-also access the Object Editor by clicking *Settings > Edit Saved Objects*.
+also access the Object Editor by clicking *Settings > Objects*.
[float]
[[sharing-dashboards]]
@@ -71,8 +69,11 @@ in your Web page.
NOTE: A user must have Kibana access in order to view embedded dashboards.
-Click the *Share* button to display HTML code to embed the dashboard in another Web page, along with a direct link to
-the dashboard. You can select the text in either option to copy the code or the link to your clipboard.
+To share a dashboard, click the *Share* button image:images/share-dashboard.png[] to display the _Sharing_ panel.
+
+Click the *Copy to Clipboard* button image:images/share-link.png[] to copy the native URL or embed HTML to the clipboard.
+Click the *Generate short URL* button image:images/share-short-link.png[] to create a shortened URL for sharing or
+embedding.
[float]
[[embedding-dashboards]]
diff --git a/docs/datatable.asciidoc b/docs/datatable.asciidoc
index b80cb8ccd812d..0c63d610b64d9 100644
--- a/docs/datatable.asciidoc
+++ b/docs/datatable.asciidoc
@@ -39,18 +39,18 @@ in a name to display on the visualization.
*Geohash*:: The {ref}search-aggregations-bucket-geohashgrid-aggregation.html[_geohash_] aggregation displays points
based on the geohash coordinates.
-Once you've specified a bucket type aggregation, you can define sub-aggregations to refine the visualization. Click
-*+ Add Sub Aggregation* to define a sub-aggregation, then choose *Split Rows* or *Split Table*, then select a
-sub-aggregation from the list of types.
+Once you've specified a bucket type aggregation, you can define sub-buckets to refine the visualization. Click
+*+ Add sub-buckets* to define a sub-bucket, then choose *Split Rows* or *Split Table*, then select an
+aggregation from the list of types.
You can use the up or down arrows to the right of the aggregation's type to change the aggregation's priority.
+Enter a string in the *Custom Label* field to change the display label.
+
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
-*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
-*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:
diff --git a/docs/discover.asciidoc b/docs/discover.asciidoc
index 38db80d39d332..75f6e58ec5158 100644
--- a/docs/discover.asciidoc
+++ b/docs/discover.asciidoc
@@ -6,7 +6,7 @@ You can also see the number of documents that match the search query and get fie
configured for the selected index pattern, the distribution of documents over time is displayed in a histogram at the
top of the page.
-image:images/Discover-Start-Annotated.jpg[Discover Page]
+image::images/Discover-Start-Annotated.jpg[Discover Page]
[float]
[[set-time-filter]]
@@ -43,15 +43,16 @@ sets an interval based on the time range.
[[search]]
=== Searching Your Data
You can search the indices that match the current index pattern by submitting a search from the Discover page.
-You can enter simple query strings, use the Lucene https://lucene.apache.org/core/2_9_4/queryparsersyntax.html[query
-syntax], or use the full JSON-based {ref}/query-dsl.html[Elasticsearch Query DSL].
+You can enter simple query strings, use the
+Lucene https://lucene.apache.org/core/2_9_4/queryparsersyntax.html[query syntax], or use the full JSON-based
+{ref}/query-dsl.html[Elasticsearch Query DSL].
When you submit a search, the histogram, Documents table, and Fields list are updated to reflect
the search results. The total number of hits (matching documents) is shown in the upper right corner of the
histogram. The Documents table shows the first five hundred hits. By default, the hits are listed in reverse
chronological order, with the newest documents shown first. You can reverse the sort order by by clicking on the Time
-column header. You can also sort the table using the values in any indexed field. For more information, see <>.
+column header. You can also sort the table using the values in any indexed field. For more information, see
+<>.
To search your data:
@@ -79,9 +80,7 @@ Reference.
[float]
[[new-search]]
==== Starting a New Search
-To clear the current search and start a new search, click the *New Search* button in the Discover toolbar.
-
-image:images/Discover-New-Search.jpg[New Search]
+To clear the current search and start a new search, click the *New* button in the Discover toolbar.
[float]
[[save-search]]
@@ -91,19 +90,18 @@ Saving a search saves both the search query string and the currently selected in
To save the current search:
-. Click the *Save Search* button image:images/SaveButton.jpg[Save Search button] in the Discover toolbar.
+. Click the *Save* button in the Discover toolbar.
. Enter a name for the search and click *Save*.
[float]
[[load-search]]
-==== Loading a Saved Search
+==== Opening a Saved Search
To load a saved search:
-. Click the *Load Search* button image:images/LoadButton.jpg[Load Search
-button] in the Discover toolbar.
-. Select the search you want to load.
+. Click the *Open* button in the Discover toolbar.
+. Select the search you want to open.
-If the saved search is associated with a different index pattern than is currently selected, loading the saved search
+If the saved search is associated with a different index pattern than is currently selected, opening the saved search
also changes the selected index pattern.
[float]
@@ -184,13 +182,11 @@ image:images/add-column-button.png[Add Column] *Toggle column in table* button.
[float]
[[sorting]]
==== Sorting the Document List
-You can sort the documents in the Documents table by the values in any indexed field. If a time field is configured for
-the selected index pattern, by default the documents are sorted in reverse chronological order.
-
-To change the sort order:
+You can sort the documents in the Documents table by the values in any indexed field. Documents in index patterns that
+are configured with time fields are sorted in reverse chronological order by default.
-* Click the name of the field you want to sort by. The fields you can use for sorting have a sort button to the right
-of the field name. Clicking the field name a second time reverses the sort order.
+To change the sort order, click the name of the field you want to sort by. The fields you can use for sorting have a
+sort button to the right of the field name. Clicking the field name a second time reverses the sort order.
[float]
[[adding-columns]]
@@ -228,10 +224,8 @@ button image:images/RemoveFieldButton.jpg[Remove Field Button].
From the field list, you can see how many documents in the Documents table contain a particular field, what the top 5
values are, and what percentage of documents contain each value.
-To view field data statistics:
-
-* Click the name of a field in the Fields list. The field can be anywhere in the Fields list--Selected Fields, Popular
-Fields, or the list of other fields.
+To view field data statistics, click the name of a field in the Fields list. The field can be anywhere in the Fields
+list.
image:images/Discover-FieldStats.jpg[Field Statistics]
diff --git a/docs/duration-formatter.asciidoc b/docs/duration-formatter.asciidoc
new file mode 100644
index 0000000000000..36a73f61f6227
--- /dev/null
+++ b/docs/duration-formatter.asciidoc
@@ -0,0 +1,15 @@
+The `Duration` field formatter can display the numeric value of a field in the following increments:
+
+* Picoseconds
+* Nanoseconds
+* Microseconds
+* Milliseconds
+* Seconds
+* Minutes
+* Hours
+* Days
+* Weeks
+* Months
+* Years
+
+You can specify these increments with up to 20 decimal places for both input and output formats.
diff --git a/docs/filter-pinning.asciidoc b/docs/filter-pinning.asciidoc
index 090b009f63e3a..ac1176a245a6b 100644
--- a/docs/filter-pinning.asciidoc
+++ b/docs/filter-pinning.asciidoc
@@ -1,6 +1,6 @@
=== Working with Filters
-When you create a filter anywhere in Kibana, the filter conditions display in a green oval under the search text
+When you create a filter anywhere in Kibana, the filter conditions display in an oval under the search text
entry box:
image::images/filter-sample.png[]
@@ -10,7 +10,7 @@ Hovering on the filter oval displays the following icons:
image::images/filter-allbuttons.png[]
Enable Filter image:images/filter-enable.png[]:: Click this icon to disable the filter without removing it. You can
-enable the filter again later by clicking the icon again. Disabled filters display a striped shaded color, green for
+enable the filter again later by clicking the icon again. Disabled filters display a striped shaded color, grey for
inclusion filters and red for exclusion filters.
Pin Filter image:images/filter-pin.png[]:: Click this icon to _pin_ a filter. Pinned filters persist across Kibana tabs.
You can pin filters from the _Visualize_ tab, click on the _Discover_ or _Dashboard_ tabs, and those filters remain in
@@ -18,7 +18,7 @@ place.
NOTE: If you have a pinned filter and you're not seeing any query results, that your current tab's index pattern is one
that the filter applies to.
Toggle Filter image:images/filter-toggle.png[]:: Click this icon to _toggle_ a filter. By default, filters are inclusion
-filters, and display in green. Only elements that match the filter are displayed. To change this to an exclusion
+filters, and display in grey. Only elements that match the filter are displayed. To change this to an exclusion
filters, displaying only elements that _don't_ match, toggle the filter. Exclusion filters display in red.
Remove Filter image:images/filter-delete.png[]:: Click this icon to remove a filter entirely.
Custom Filter image:images/filter-custom.png[]:: Click this icon to display a text field where you can customize the JSON
diff --git a/docs/getting-started.asciidoc b/docs/getting-started.asciidoc
index 0d787a66dbd9a..ecc7dac6bbc48 100644
--- a/docs/getting-started.asciidoc
+++ b/docs/getting-started.asciidoc
@@ -6,7 +6,7 @@ key Kibana functionality. By the end of this tutorial, you will have:
* Loaded a sample data set into your Elasticsearch installation
* Defined at least one index pattern
-* Use the <> functionality to explore your data
+* Used the <> functionality to explore your data
* Set up some <> to graphically represent your data
* Assembled visualizations into a <>
@@ -14,10 +14,10 @@ The material in this section assumes you have a working Kibana install connected
Video tutorials are also available:
-* https://www.elastic.co/blog/kibana-4-video-tutorials-part-1[High-level Kibana 4 introduction, pie charts]
+* https://www.elastic.co/blog/kibana-4-video-tutorials-part-1[High-level Kibana introduction, pie charts]
* https://www.elastic.co/blog/kibana-4-video-tutorials-part-2[Data discovery, bar charts, and line charts]
* https://www.elastic.co/blog/kibana-4-video-tutorials-part-3[Tile maps]
-* https://www.elastic.co/blog/kibana-4-video-tutorials-part-4[Embedding Kibana 4 visualizations]
+* https://www.elastic.co/blog/kibana-4-video-tutorials-part-4[Embedding Kibana visualizations]
[float]
[[tutorial-load-dataset]]
@@ -76,9 +76,9 @@ The schema for the logs data set has dozens of different fields, but the notable
"@timestamp": "date"
}
-Before we load the Shakespeare data set, we need to set up a {ref}/mapping.html[_mapping_] for the fields. Mapping
-divides the documents in the index into logical groups and specifies a field's characteristics, such as the field's
-searchability or whether or not it's _tokenized_, or broken up into separate words.
+Before we load the Shakespeare and logs data sets, we need to set up {ref}/mapping.html[_mappings_] for the fields.
+Mapping divides the documents in the index into logical groups and specifies a field's characteristics, such as the
+field's searchability or whether or not it's _tokenized_, or broken up into separate words.
Use the following command to set up a mapping for the Shakespeare data set:
@@ -195,10 +195,11 @@ yellow open logstash-2015.05.20 5 1 4750 0 16.4mb
[[tutorial-define-index]]
=== Defining Your Index Patterns
-Each set of data loaded to Elasticsearch has an <>. In the previous section, the Shakespeare data set has an index named `shakespeare`, and the accounts
-data set has an index named `bank`. An _index pattern_ is a string with optional wildcards that can match multiple
-indices. For example, in the common logging use case, a typical index name contains the date in MM-DD-YYYY
-format, and an index pattern for May would look something like `logstash-2015.05*`.
+Each set of data loaded to Elasticsearch has an <>. In the previous section, the
+Shakespeare data set has an index named `shakespeare`, and the accounts data set has an index named `bank`. An _index
+pattern_ is a string with optional wildcards that can match multiple indices. For example, in the common logging use
+case, a typical index name contains the date in MM-DD-YYYY format, and an index pattern for May would look something
+like `logstash-2015.05*`.
For this tutorial, any pattern that matches the name of an index we've loaded will work. Open a browser and
navigate to `localhost:5601`. Click the *Settings* tab, then the *Indices* tab. Click *Add New* to define a new index
@@ -211,11 +212,14 @@ The Logstash data set does contain time-series data, so after clicking *Add New*
set, make sure the *Index contains time-based events* box is checked and select the `@timestamp` field from the
*Time-field name* drop-down.
+NOTE: When you define an index pattern, indices that match that pattern must exist in Elasticsearch. Those indices must
+contain data.
+
[float]
[[tutorial-discovering]]
=== Discovering Your Data
-Click the *Discover* tab to display Kibana's data discovery functions:
+Click the *Discover* image:images/discover-compass.png[Compass icon] tab to display Kibana's data discovery functions:
image::images/tutorial-discover.png[]
@@ -253,7 +257,7 @@ image::images/tutorial-discover-3.png[]
The visualization tools available on the *Visualize* tab enable you to display aspects of your data sets in several
different ways.
-Click on the *Visualize* tab to start:
+Click on the *Visualize* image:images/visualize-icon.png[Bar chart icon] tab to start:
image::images/tutorial-visualize.png[]
@@ -280,7 +284,7 @@ total number of ranges to six. Enter the following ranges:
15000 30999
31000 50000
-Click the green *Apply changes* button image:images/apply-changes-button.png[] to display the chart:
+Click the *Apply changes* button image:images/apply-changes-button.png[] to display the chart:
image::images/tutorial-visualize-pie-2.png[]
@@ -288,8 +292,10 @@ This shows you what proportion of the 1000 accounts fall in these balance ranges
we're going to add another bucket aggregation. We can break down each of the balance ranges further by the account
holder's age.
-Click *Add sub-buckets* at the bottom, then select *Split Slices*. Choose the *Terms* aggregation and the *age* field from the drop-downs.
-Click the green *Apply changes* button image:images/apply-changes-button.png[] to add an external ring with the new results.
+Click *Add sub-buckets* at the bottom, then select *Split Slices*. Choose the *Terms* aggregation and the *age* field from
+the drop-downs.
+Click the *Apply changes* button image:images/apply-changes-button.png[] to add an external ring with the new
+results.
image::images/tutorial-visualize-pie-3.png[]
@@ -304,10 +310,10 @@ image::images/tutorial-visualize-bar-1.png[]
For the Y-axis metrics aggregation, select *Unique Count*, with *speaker* as the field. For Shakespeare plays, it might
be useful to know which plays have the lowest number of distinct speaking parts, if your theater company is short on
actors. For the X-Axis buckets, select the *Terms* aggregation with the *play_name* field. For the *Order*, select
-*Ascending*, leaving the *Size* at 5.
+*Ascending*, leaving the *Size* at 5. Write a description for the axes in the *Custom Label* fields.
-Leave the other elements at their default values and click the green *Apply changes* button image:images/apply-changes-button.png[]. Your chart should now look
-like this:
+Leave the other elements at their default values and click the *Apply changes* button
+image:images/apply-changes-button.png[]. Your chart should now look like this:
image::images/tutorial-visualize-bar-2.png[]
@@ -321,14 +327,18 @@ as well as change many other options for your visualizations, by clicking the *O
Now that you have a list of the smallest casts for Shakespeare plays, you might also be curious to see which of these
plays makes the greatest demands on an individual actor by showing the maximum number of speeches for a given part. Add
a Y-axis aggregation with the *Add metrics* button, then choose the *Max* aggregation for the *speech_number* field. In
-the *Options* tab, change the *Bar Mode* drop-down to *grouped*, then click the green *Apply changes* button image:images/apply-changes-button.png[]. Your
-chart should now look like this:
+the *Options* tab, change the *Bar Mode* drop-down to *grouped*, then click the *Apply changes* button
+image:images/apply-changes-button.png[]. Your chart should now look like this:
image::images/tutorial-visualize-bar-3.png[]
As you can see, _Love's Labours Lost_ has an unusually high maximum speech number, compared to the other plays, and
might therefore make more demands on an actor's memory.
+Note how the *Number of speaking parts* Y-axis starts at zero, but the bars don't begin to differentiate until 18. To
+make the differences stand out by starting the Y-axis at a value closer to the minimum, check the
+*Scale Y-Axis to data bounds* box in the *Options* tab.
+
Save this chart with the name _Bar Example_.
Next, we're going to make a tile map chart to visualize some geographic data. Click on *New Visualization*, then
@@ -343,8 +353,8 @@ at the bottom. You'll see a map of the world, since we haven't defined any bucke
image::images/tutorial-visualize-map-1.png[]
-Select *Geo Coordinates* as the bucket, then click the green *Apply changes* button image:images/apply-changes-button.png[]. Your chart should now look like
-this:
+Select *Geo Coordinates* as the bucket, then click the *Apply changes* button image:images/apply-changes-button.png[].
+Your chart should now look like this:
image::images/tutorial-visualize-map-2.png[]
@@ -371,7 +381,8 @@ Write the following text in the field:
The Markdown widget uses **markdown** syntax.
> Blockquotes in Markdown use the > character.
-Click the green *Apply changes* button image:images/apply-changes-button.png[] to display the rendered Markdown in the preview pane:
+Click the *Apply changes* button image:images/apply-changes-button.png[] to display the rendered Markdown in the
+preview pane:
image::images/tutorial-visualize-md-2.png[]
diff --git a/docs/images/Discover-Start-Annotated.jpg b/docs/images/Discover-Start-Annotated.jpg
index bb3bd9ecfd592..eacdfbaee4c0e 100644
Binary files a/docs/images/Discover-Start-Annotated.jpg and b/docs/images/Discover-Start-Annotated.jpg differ
diff --git a/docs/images/NewDashboard.png b/docs/images/NewDashboard.png
index bace573f40924..89e816c0458ae 100644
Binary files a/docs/images/NewDashboard.png and b/docs/images/NewDashboard.png differ
diff --git a/docs/images/Start-Page.png b/docs/images/Start-Page.png
index 756b6b5bdbd2d..6d0544a66303b 100644
Binary files a/docs/images/Start-Page.png and b/docs/images/Start-Page.png differ
diff --git a/docs/images/TimeFilter.jpg b/docs/images/TimeFilter.jpg
index 1de61b791e5bb..1c8700bc05616 100644
Binary files a/docs/images/TimeFilter.jpg and b/docs/images/TimeFilter.jpg differ
diff --git a/docs/images/VizEditor.jpg b/docs/images/VizEditor.jpg
index 2dc5241a6988d..8aabfe544a0cd 100644
Binary files a/docs/images/VizEditor.jpg and b/docs/images/VizEditor.jpg differ
diff --git a/docs/images/apply-changes-button.png b/docs/images/apply-changes-button.png
index c45723877a51c..7ec98e6ccdcb4 100644
Binary files a/docs/images/apply-changes-button.png and b/docs/images/apply-changes-button.png differ
diff --git a/docs/images/autorefresh-pause.png b/docs/images/autorefresh-pause.png
index 757d3367db58b..5a83c4587c961 100644
Binary files a/docs/images/autorefresh-pause.png and b/docs/images/autorefresh-pause.png differ
diff --git a/docs/images/autorefresh.png b/docs/images/autorefresh.png
index b4cad35383e19..cf24bac5af08f 100644
Binary files a/docs/images/autorefresh.png and b/docs/images/autorefresh.png differ
diff --git a/docs/images/color-picker.png b/docs/images/color-picker.png
new file mode 100644
index 0000000000000..a1148d3f4b1df
Binary files /dev/null and b/docs/images/color-picker.png differ
diff --git a/docs/images/discover-compass.png b/docs/images/discover-compass.png
new file mode 100644
index 0000000000000..d92f2223afcfb
Binary files /dev/null and b/docs/images/discover-compass.png differ
diff --git a/docs/images/kibana-status-page.png b/docs/images/kibana-status-page.png
index f21be1fbd4cd0..f20a7970ba76d 100644
Binary files a/docs/images/kibana-status-page.png and b/docs/images/kibana-status-page.png differ
diff --git a/docs/images/share-dashboard.png b/docs/images/share-dashboard.png
new file mode 100644
index 0000000000000..0c1d4c9bd46d6
Binary files /dev/null and b/docs/images/share-dashboard.png differ
diff --git a/docs/images/share-link.png b/docs/images/share-link.png
new file mode 100644
index 0000000000000..68894c16a5444
Binary files /dev/null and b/docs/images/share-link.png differ
diff --git a/docs/images/share-short-link.png b/docs/images/share-short-link.png
new file mode 100644
index 0000000000000..230b7c151e7f6
Binary files /dev/null and b/docs/images/share-short-link.png differ
diff --git a/docs/images/sharing-panel.png b/docs/images/sharing-panel.png
new file mode 100644
index 0000000000000..43abf920a8e07
Binary files /dev/null and b/docs/images/sharing-panel.png differ
diff --git a/docs/images/tutorial-dashboard.png b/docs/images/tutorial-dashboard.png
index 8f368541cbe75..2a0ec34bcca5a 100644
Binary files a/docs/images/tutorial-dashboard.png and b/docs/images/tutorial-dashboard.png differ
diff --git a/docs/images/tutorial-discover-2.png b/docs/images/tutorial-discover-2.png
index 6f52c6ae755f5..fdf6477e1375c 100644
Binary files a/docs/images/tutorial-discover-2.png and b/docs/images/tutorial-discover-2.png differ
diff --git a/docs/images/tutorial-discover-3.png b/docs/images/tutorial-discover-3.png
index 011537d4fad4d..15f164a2f3cc5 100644
Binary files a/docs/images/tutorial-discover-3.png and b/docs/images/tutorial-discover-3.png differ
diff --git a/docs/images/tutorial-discover.png b/docs/images/tutorial-discover.png
index 5791915486273..f2a7eac1917b4 100644
Binary files a/docs/images/tutorial-discover.png and b/docs/images/tutorial-discover.png differ
diff --git a/docs/images/tutorial-timepicker.png b/docs/images/tutorial-timepicker.png
index b8117d10bd33c..f1ac7befa451d 100644
Binary files a/docs/images/tutorial-timepicker.png and b/docs/images/tutorial-timepicker.png differ
diff --git a/docs/images/tutorial-visualize-bar-1.png b/docs/images/tutorial-visualize-bar-1.png
index 54312eb376290..f9acbe8f37209 100644
Binary files a/docs/images/tutorial-visualize-bar-1.png and b/docs/images/tutorial-visualize-bar-1.png differ
diff --git a/docs/images/tutorial-visualize-bar-2.png b/docs/images/tutorial-visualize-bar-2.png
index 471922df2ac8a..1c047e08485c9 100644
Binary files a/docs/images/tutorial-visualize-bar-2.png and b/docs/images/tutorial-visualize-bar-2.png differ
diff --git a/docs/images/tutorial-visualize-bar-3.png b/docs/images/tutorial-visualize-bar-3.png
index a117c6a0ed34f..4da21b0eb53cb 100644
Binary files a/docs/images/tutorial-visualize-bar-3.png and b/docs/images/tutorial-visualize-bar-3.png differ
diff --git a/docs/images/tutorial-visualize-map-1.png b/docs/images/tutorial-visualize-map-1.png
index 01c74787b8c7d..539c54c1b23b7 100644
Binary files a/docs/images/tutorial-visualize-map-1.png and b/docs/images/tutorial-visualize-map-1.png differ
diff --git a/docs/images/tutorial-visualize-map-2.png b/docs/images/tutorial-visualize-map-2.png
index 0a1ad6008b59a..4b3267043929f 100644
Binary files a/docs/images/tutorial-visualize-map-2.png and b/docs/images/tutorial-visualize-map-2.png differ
diff --git a/docs/images/tutorial-visualize-map-3.png b/docs/images/tutorial-visualize-map-3.png
index 30a7e18dc94a6..9e57838637cdc 100644
Binary files a/docs/images/tutorial-visualize-map-3.png and b/docs/images/tutorial-visualize-map-3.png differ
diff --git a/docs/images/tutorial-visualize-md-1.png b/docs/images/tutorial-visualize-md-1.png
index cc57c3e6c1279..55504682a0e5b 100644
Binary files a/docs/images/tutorial-visualize-md-1.png and b/docs/images/tutorial-visualize-md-1.png differ
diff --git a/docs/images/tutorial-visualize-md-2.png b/docs/images/tutorial-visualize-md-2.png
index 307334f4ff725..c1bbc120aa011 100644
Binary files a/docs/images/tutorial-visualize-md-2.png and b/docs/images/tutorial-visualize-md-2.png differ
diff --git a/docs/images/tutorial-visualize-pie-1.png b/docs/images/tutorial-visualize-pie-1.png
index cbbdf3a64cdc1..f6445ff6a36cb 100644
Binary files a/docs/images/tutorial-visualize-pie-1.png and b/docs/images/tutorial-visualize-pie-1.png differ
diff --git a/docs/images/tutorial-visualize-pie-2.png b/docs/images/tutorial-visualize-pie-2.png
index e26df22f37d31..781a30ce8d448 100644
Binary files a/docs/images/tutorial-visualize-pie-2.png and b/docs/images/tutorial-visualize-pie-2.png differ
diff --git a/docs/images/tutorial-visualize-pie-3.png b/docs/images/tutorial-visualize-pie-3.png
index 5d282ac5b3ef5..340cad28c53de 100644
Binary files a/docs/images/tutorial-visualize-pie-3.png and b/docs/images/tutorial-visualize-pie-3.png differ
diff --git a/docs/images/tutorial-visualize.png b/docs/images/tutorial-visualize.png
index d02a6946c932a..f5b47d8434a78 100644
Binary files a/docs/images/tutorial-visualize.png and b/docs/images/tutorial-visualize.png differ
diff --git a/docs/images/visualize-icon.png b/docs/images/visualize-icon.png
new file mode 100644
index 0000000000000..1166eb86134b4
Binary files /dev/null and b/docs/images/visualize-icon.png differ
diff --git a/docs/index.asciidoc b/docs/index.asciidoc
index ca50db3926ab8..4ef7fe6301462 100644
--- a/docs/index.asciidoc
+++ b/docs/index.asciidoc
@@ -3,8 +3,11 @@
:ref: http://www.elastic.co/guide/en/elasticsearch/reference/current/
:shield: https://www.elastic.co/guide/en/shield/current
+:scyld: X-Pack Security
:k4issue: https://github.com/elastic/kibana/issues/
:k4pull: https://github.com/elastic/kibana/pull/
+:version: master
+:esversion: master
include::introduction.asciidoc[]
diff --git a/docs/introduction.asciidoc b/docs/introduction.asciidoc
index f4e336104360c..664e73e89d7f2 100644
--- a/docs/introduction.asciidoc
+++ b/docs/introduction.asciidoc
@@ -1,21 +1,24 @@
[[introduction]]
== Introduction
-Kibana is an open source analytics and visualization platform designed to work
-with Elasticsearch. You use Kibana to search, view, and interact with data
-stored in Elasticsearch indices. You can easily perform advanced data analysis
+Kibana is an open source analytics and visualization platform designed to work with Elasticsearch. You use Kibana to
+search, view, and interact with data stored in Elasticsearch indices. You can easily perform advanced data analysis
and visualize your data in a variety of charts, tables, and maps.
-Kibana makes it easy to understand large volumes of data. Its simple,
-browser-based interface enables you to quickly create and share dynamic
-dashboards that display changes to Elasticsearch queries in real time.
+Kibana makes it easy to understand large volumes of data. Its simple, browser-based interface enables you to quickly
+create and share dynamic dashboards that display changes to Elasticsearch queries in real time.
-Setting up Kibana is a snap. You can install Kibana and start exploring your
-Elasticsearch indices in minutes -- no code, no additional infrastructure required.
+Setting up Kibana is a snap. You can install Kibana and start exploring your Elasticsearch indices in minutes -- no
+code, no additional infrastructure required.
-NOTE: This guide describes how to use Kibana 4.3. For information about what's new
-in Kibana 4.3, see the <>.
+For more information about creating and sharing visualizations and dashboards, see the <>
+and <> topics. A complete <> covering several aspects of Kibana's
+functionality is also available.
+
+NOTE: This guide describes how to use Kibana {version}. For information about what's new in Kibana {version}, see
+the <>.
+////
[float]
[[data-discovery]]
=== Data Discovery and Visualization
@@ -50,7 +53,4 @@ to correlate related information. For example, we could create a dashboard
that displays several visualizations of the TFL data:
image:images/TFL-Dashboard.jpg[Dashboard]
-
-For more information about creating and sharing visualizations and dashboards, see the <>
-and <> topics. A complete <> covering several aspects of Kibana's
-functionality is also available.
+////
diff --git a/docs/kibana-repositories.asciidoc b/docs/kibana-repositories.asciidoc
index ff67335323bf8..a2492d8b680c7 100644
--- a/docs/kibana-repositories.asciidoc
+++ b/docs/kibana-repositories.asciidoc
@@ -1,12 +1,12 @@
[[setup-repositories]]
=== Kibana Repositories
-Binary packages for Kibana are available for Unix distributions that support the `apt` and `yum` tools.We also have
-repositories available for APT and YUM based distributions.
+Binary packages for Kibana are available for Unix distributions that support the `apt` and `yum` tools. We also have
+repositories available for APT and YUM based distributions.
NOTE: Since the packages are created as part of the Kibana build, source packages are not available.
-Packages are signed with the PGP key http://pgp.mit.edu/pks/lookup?op=vindex&search=0xD27D666CD88E42B4[D88E42B4], which
+Packages are signed with the PGP key http://pgp.mit.edu/pks/lookup?op=vindex&search=0xD27D666CD88E42B4[D88E42B4], which
has the following fingerprint:
4609 5ACC 8548 582C 1A26 99A9 D27D 666C D88E 42B4
@@ -22,32 +22,33 @@ has the following fingerprint:
wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
--------------------------------------------------
+
-. Add the repository definition to your `/etc/apt/sources.list` file:
+. Add the repository definition to your `/etc/apt/sources.list.d/kibana.list` file:
+
-[source, sh]
+["source","sh",subs="attributes"]
--------------------------------------------------
-echo "deb http://packages.elastic.co/kibana/{branch}/debian stable main" | sudo tee -a /etc/apt/sources.list
+echo "deb http://packages.elastic.co/kibana/{version}/debian stable main" | sudo tee -a /etc/apt/sources.list.d/kibana.list
--------------------------------------------------
+
[WARNING]
==================================================
-Use the `echo` method described above to add the Kibana repository. Do not use `add-apt-repository`, as that command
+Use the `echo` method described above to add the Kibana repository. Do not use `add-apt-repository`, as that command
adds a `deb-src` entry with no corresponding source package.
-When the `deb-src` entry, is present, the commands in this procedure generate an error similar to the following:
+
+When the `deb-src` entry is present, the commands in this procedure generate an error similar to the following:
Unable to find expected entry 'main/source/Sources' in Release file (Wrong sources.list entry or malformed file)
-Delete the `deb-src` entry from the `/etc/apt/sources.list` file to clear the error.
+Delete the `deb-src` entry from the `/etc/apt/sources.list.d/kibana.list` file to clear the error.
==================================================
+
-. Run `apt-get update` and the repository is ready for use. Install Kibana with the following command:
+. Run `apt-get update` to ready the repository. Install Kibana with the following command:
+
[source,sh]
--------------------------------------------------
sudo apt-get update && sudo apt-get install kibana
--------------------------------------------------
+
-. Configure Kibana to automatically start during bootup. If your distribution is using the System V version of `init`,
+. Configure Kibana to automatically start during bootup. If your distribution is using the System V version of `init`,
run the following command:
+
[source,sh]
@@ -67,7 +68,7 @@ sudo /bin/systemctl enable kibana.service
[[kibana-yum]]
===== Installing Kibana with yum
-WARNING: The repositories set up in this procedure are not compatible with distributions using version 3 of `rpm`, such
+WARNING: The repositories set up in this procedure are not compatible with distributions using version 3 of `rpm`, such
as CentOS version 5.
. Download and install the public signing key:
@@ -79,11 +80,11 @@ rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
+
. Create a file named `kibana.repo` in the `/etc/yum.repos.d/` directory with the following contents:
+
-[source,sh]
+["source","sh",subs="attributes"]
--------------------------------------------------
-[kibana-{branch}]
-name=Kibana repository for {branch}.x packages
-baseurl=http://packages.elastic.co/kibana/{branch}/centos
+[kibana-{version}]
+name=Kibana repository for {version}.x packages
+baseurl=http://packages.elastic.co/kibana/{version}/centos
gpgcheck=1
gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch
enabled=1
@@ -96,8 +97,8 @@ enabled=1
yum install kibana
--------------------------------------------------
+
-Configure Kibana to automatically start during bootup. If your distribution is using the System V version of `init`,
-run the following command:
+Configure Kibana to automatically start during bootup. If your distribution is using the System V version of `init`
+(check with `ps -p 1`), run the following command:
+
[source,sh]
--------------------------------------------------
diff --git a/docs/kibana-yml.asciidoc b/docs/kibana-yml.asciidoc
new file mode 100644
index 0000000000000..43272c0d73291
--- /dev/null
+++ b/docs/kibana-yml.asciidoc
@@ -0,0 +1,48 @@
+.Kibana Configuration Settings
+[horizontal]
+`server.port:`:: *Default: 5601* Kibana is served by a back end server. This setting specifies the port to use.
+`server.host:`:: *Default: "0.0.0.0"* This setting specifies the IP address of the back end server.
+`server.basePath:`:: Enables you to specify a path to mount Kibana at if you are running behind a proxy. This setting
+cannot end in a slash (`/`).
+`server.maxPayloadBytes:`:: *Default: 1048576* The maximum payload size in bytes for incoming server requests.
+`server.name:`:: *Default: "your-hostname"* A human-readable display name that identifies this Kibana instance.
+`elasticsearch.url:`:: *Default: "http://localhost:9200"* The URL of the Elasticsearch instance to use for all your
+queries.
+`elasticsearch.preserveHost:`:: *Default: true* When this setting’s value is `true`, Kibana uses the hostname specified in
+the `server.host` setting. When the value of this setting is `false`, Kibana uses the hostname of the host that connects
+to this Kibana instance.
+`kibana.index:`:: *Default: ".kibana"* Kibana uses an index in Elasticsearch to store saved searches, visualizations and
+dashboards. Kibana creates a new index if the index doesn’t already exist.
+`kibana.defaultAppId:`:: *Default: "discover"* The default application to load.
+`elasticsearch.username:` and `elasticsearch.password:`:: If your Elasticsearch is protected with basic authentication,
+these settings provide the username and password that the Kibana server uses to perform maintenance on the Kibana index at
+startup. Your Kibana users still need to authenticate with Elasticsearch, which is proxied through the Kibana server.
+`server.ssl.cert:` and `server.ssl.key:`:: Paths to the PEM-format SSL certificate and SSL key files, respectively. These
+files enable SSL for outgoing requests from the Kibana server to the browser.
+`elasticsearch.ssl.cert:` and `elasticsearch.ssl.key:`:: Optional settings that provide the paths to the PEM-format SSL
+certificate and key files. These files validate that your Elasticsearch backend uses the same key files.
+`elasticsearch.ssl.ca:`:: Optional setting that enables you to specify a path to the PEM file for the certificate
+authority for your Elasticsearch instance.
+`elasticsearch.ssl.verify:`:: *Default: true* To disregard the validity of SSL certificates, change this setting’s value
+to `false`.
+`elasticsearch.pingTimeout:`:: *Default: the value of the `elasticsearch.requestTimeout` setting* Time in milliseconds to
+wait for Elasticsearch to respond to pings.
+`elasticsearch.requestTimeout:`:: *Default: 30000* Time in milliseconds to wait for responses from the back end or
+Elasticsearch. This value must be a positive integer.
+`elasticsearch.requestHeadersWhitelist:`:: *Default: `[ 'authorization' ]`* List of Kibana client-side headers to send to Elasticsearch.
+To send *no* client-side headers, set this value to [] (an empty list).
+`elasticsearch.shardTimeout:`:: *Default: 0* Time in milliseconds for Elasticsearch to wait for responses from shards. Set
+to 0 to disable.
+`elasticsearch.startupTimeout:`:: *Default: 5000* Time in milliseconds to wait for Elasticsearch at Kibana startup before
+retrying.
+`pid.file:`:: Specifies the path where Kibana creates the process ID file.
+`logging.dest:`:: *Default: `stdout`* Enables you to specify a file where Kibana stores log output.
+`logging.silent:`:: *Default: false* Set the value of this setting to `true` to suppress all logging output.
+`logging.quiet:`:: *Default: false* Set the value of this setting to `true` to suppress all logging output other than
+error messages.
+`logging.verbose`:: *Default: false* Set the value of this setting to `true` to log all events, including system usage
+information and all requests.
+`ops.interval`:: *Default: 5000* Set the interval in milliseconds to sample system and process performance metrics.
+The minimum value is 100.
+`status.allowAnonymous`:: *Default: false* If authentication is enabled, setting this to `true` allows
+unauthenticated users to access the Kibana server status API and status page.
diff --git a/docs/line.asciidoc b/docs/line.asciidoc
index 3ab261c43260c..a349176a6c01c 100644
--- a/docs/line.asciidoc
+++ b/docs/line.asciidoc
@@ -11,6 +11,8 @@ if the splits are displayed in a row or a column by clicking the *Rows | Columns
include::x-axis-aggs.asciidoc[]
+include::color-picker.asciidoc[]
+
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
@@ -48,7 +50,7 @@ values.
*Scale Y-Axis to Data Bounds*:: The default Y-axis bounds are zero and the maximum value returned in the data. Check
this box to change both upper and lower bounds to match the values returned in the data.
-After changing options, click the green *Apply changes* button to update your visualization, or the grey *Discard
+After changing options, click the *Apply changes* button to update your visualization, or the grey *Discard
changes* button to keep your visualization in its current state.
[float]
diff --git a/docs/metric.asciidoc b/docs/metric.asciidoc
index 8a813f7dba866..e4ce743a8210b 100644
--- a/docs/metric.asciidoc
+++ b/docs/metric.asciidoc
@@ -4,6 +4,7 @@
A metric visualization displays a single number for each aggregation you select:
include::y-axis-aggs.asciidoc[]
+
You can click the *Advanced* link to display more customization options:
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
@@ -17,7 +18,7 @@ NOTE: In Elasticsearch releases 1.4.3 and later, this functionality requires you
The availability of these options varies depending on the aggregation you choose.
-Click the *Options* tab to change the font used to display the metrics.
+Click the *Options* tab to display the font size slider.
[float]
[[metric-viewing-detailed-information]]
diff --git a/docs/pie.asciidoc b/docs/pie.asciidoc
index 5ad500f4ee435..b46c2dc27e3e5 100644
--- a/docs/pie.asciidoc
+++ b/docs/pie.asciidoc
@@ -11,6 +11,8 @@ field. Select a field from the drop-down.
*Unique Count*:: The {ref}search-aggregations-metrics-cardinality-aggregation.html[_cardinality_] aggregation returns
the number of unique values in a field. Select a field from the drop-down.
+Enter a string in the *Custom Label* field to change the display label.
+
The _buckets_ aggregations determine what information is being retrieved from your data set.
Before you choose a buckets aggregation, specify if you are splitting slices within a single chart or splitting into
@@ -48,19 +50,21 @@ in a name to display on the visualization.
{ref}search-aggregations-bucket-significantterms-aggregation.html[_significant terms_] aggregation. The value of the
*Size* parameter defines the number of entries this aggregation returns.
-After defining an initial bucket aggregation, you can define sub-aggregations to refine the visualization. Click *+ Add
-Sub Aggregation* to define a sub-aggregation, then choose *Split Slices* to select a sub-aggregation from the list of
+After defining an initial bucket aggregation, you can define sub-buckets to refine the visualization. Click *+ Add
+sub-buckets* to define a sub-aggregation, then choose *Split Slices* to select a sub-bucket from the list of
types.
When multiple aggregations are defined on a chart's axis, you can use the up or down arrows to the right of the
aggregation's type to change the aggregation's priority.
+include::color-picker.asciidoc[]
+
+Enter a string in the *Custom Label* field to change the display label.
+
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
-*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
-*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:
@@ -78,7 +82,7 @@ Select the *Options* tab to change the following aspects of the table:
*Show Tooltip*:: Check this box to enable the display of tooltips.
*Show Legend*:: Check this box to enable the display of a legend next to the chart.
-After changing options, click the green *Apply changes* button to update your visualization, or the grey *Discard
+After changing options, click the *Apply changes* button to update your visualization, or the grey *Discard
changes* button to keep your visualization in its current state.
[float]
diff --git a/docs/plugins.asciidoc b/docs/plugins.asciidoc
index 47e2ad15b0656..f1548b0ddd632 100644
--- a/docs/plugins.asciidoc
+++ b/docs/plugins.asciidoc
@@ -1,53 +1,34 @@
[[kibana-plugins]]
-== Kibana Plugins added[4.2]
+== Kibana Plugins
-Add-on functionality for Kibana is implemented with plug-in modules. You can use the `bin/kibana plugin`
+Add-on functionality for Kibana is implemented with plug-in modules. You can use the `bin/kibana-plugin`
command to manage these modules. You can also install a plugin manually by moving the plugin file to the
`installedPlugins` directory and unpacking the plugin files into a new directory.
+A list of existing Kibana plugins is available on https://github.com/elastic/kibana/wiki/Known-Plugins[GitHub].
+
[float]
=== Installing Plugins
Use the following command to install a plugin:
[source,shell]
-bin/kibana plugin --install <org>/<plugin>/<version>
-
-You can also use `-i` instead of `--install`, as in the following example:
-
-[source,shell]
-bin/kibana plugin -i elasticsearch/marvel/latest
+bin/kibana-plugin install <package name or URL>
-Because the organization given is `elasticsearch`, the plugin management tool automatically downloads the
-plugin from `download.elastic.co`.
+When you specify a plugin name without a URL, the plugin tool attempts to download the plugin from `download.elastic.co`.
[float]
-=== Installing Plugins from Github
+==== Installing Plugins from an Arbitrary URL
-When the specified plugin is not found at `download.elastic.co`, the plugin management tool parses the element
-as a Github user name, as in the following example:
+You can specify a URL to a specific plugin, as in the following example:
[source,shell]
-bin/kibana plugin --install github-user/sample-plugin
-Installing sample-plugin
-Attempting to extract from https://download.elastic.co/github-user/sample-plugin/sample-plugin-latest.tar.gz
-Attempting to extract from https://github.com/github-user/sample-plugin/archive/master.tar.gz
-Downloading bytes....................
-Extraction complete
-Optimizing and caching browser bundles...
-Plugin installation complete
-
-[float]
-=== Installing Plugins from an Arbitrary URL
-
-You can specify a URL to a plugin with the `-u` or `--url` options after the `-i` or `--install` option, as in the
-following example:
-
-[source,shell]
-bin/kibana plugin -i sample-plugin -u https://some.sample.url/directory
-Installing sample-plugin
-Attempting to extract from https://some.sample.url/directory
-Downloading bytes....................
+$ bin/kibana-plugin install https://download.elastic.co/kibana/x-pack/x-pack-5.0.0-snapshot.zip
+Attempting to transfer from https://download.elastic.co/kibana/x-pack//x-pack-5.0.0-snapshot.zip
+Transferring bytes....................
+Transfer complete
+Retrieving metadata from plugin archive
+Extracting plugin archive
Extraction complete
Optimizing and caching browser bundles...
Plugin installation complete
@@ -57,13 +38,17 @@ You can specify URLs that use the HTTP, HTTPS, or `file` protocols.
[float]
=== Installing Plugins to an Arbitrary Directory
-Use the `-d` or `--plugin-dir` option to specify a directory for plugins, as in the following example:
+Use the `-d` or `--plugin-dir` option after the `install` command to specify a directory for plugins, as in the following
+example:
[source,shell]
-bin/kibana plugin -i elasticsearch/sample-plugin/latest -d
+$ bin/kibana-plugin install file:///some/local/path/x-pack.zip -d path/to/directory
Installing sample-plugin
-Attempting to extract from https://download.elastic.co/elasticsearch/sample-plugin/sample-plugin-latest.tar.gz
-Downloading bytes....................
+Attempting to transfer from file:///some/local/path/x-pack.zip
+Transferring bytes....................
+Transfer complete
+Retrieving metadata from plugin archive
+Extracting plugin archive
Extraction complete
Optimizing and caching browser bundles...
Plugin installation complete
@@ -73,13 +58,17 @@ NOTE: This command creates the specified directory if it does not already exist.
[float]
=== Removing Plugins
-Use the `--remove` or `-r` option to remove a plugin, including any configuration information, as in the following
-example:
+Use the `remove` command to remove a plugin, including any configuration information, as in the following example:
[source,shell]
-bin/kibana plugin --remove marvel
+$ bin/kibana-plugin remove timelion
-You can also remove a plugin manually by deleting the plugin's subdirectory under the `installedPlugins` directory.
+You can also remove a plugin manually by deleting the plugin's subdirectory under the `installedPlugins/` directory.
+
+[float]
+=== Listing Installed Plugins
+
+Use the `list` command to list the currently installed plugins.
[float]
=== Updating Plugins
@@ -90,27 +79,28 @@ To update a plugin, remove the current version and reinstall the plugin.
=== Configuring the Plugin Manager
By default, the plugin manager provides you with feedback on the status of the activity you've asked the plugin manager
-to perform. You can control the level of feedback with the `--quiet` and `--silent` options. Use the `--quiet` option to
-suppress all non-error output. Use the `--silent` option to suppress all output.
+to perform. You can control the level of feedback for the `install` and `remove` commands with the `--quiet` and
+`--silent` options. Use the `--quiet` option to suppress all non-error output. Use the `--silent` option to suppress all
+output.
-By default, plugin manager requests do not time out. Use the `--timeout` option, followed by a time, to change this
-behavior, as in the following examples:
+By default, plugin manager installation requests do not time out. Use the `--timeout` option, followed by a time, to
+change this behavior, as in the following examples:
[source,shell]
.Waits for 30 seconds before failing
-bin/kibana plugin --install username/sample-plugin --timeout 30s
+bin/kibana-plugin install --timeout 30s sample-plugin
[source,shell]
.Waits for 1 minute before failing
-bin/kibana plugin --install username/sample-plugin --timeout 1m
+bin/kibana-plugin install --timeout 1m sample-plugin
[float]
==== Plugins and Custom Kibana Configurations
-Use the `-c` or `--config` options to specify the path to the configuration file used to start Kibana. By default, Kibana
-uses the configuration file `config/kibana.yml`. When you change your installed plugins, the `bin/kibana plugin` command
-restarts the Kibana server. When you are using a customized configuration file, you must specify the
-path to that configuration file each time you use the `bin/kibana plugin` command.
+Use the `-c` or `--config` options with the `install` and `remove` commands to specify the path to the configuration file
+used to start Kibana. By default, Kibana uses the configuration file `config/kibana.yml`. When you change your installed
+plugins, the `bin/kibana-plugin` command restarts the Kibana server. When you are using a customized configuration file,
+you must specify the path to that configuration file each time you use the `bin/kibana-plugin` command.
[float]
=== Plugin Manager Exit Codes
@@ -126,9 +116,14 @@ path to that configuration file each time you use the `bin/kibana plugin` comman
== Switching Plugin Functionality
The Kibana UI serves as a framework that can contain several different plugins. You can switch between these
-plugins by clicking the image:images/app-button.png[Plugin Chooser] *Plugin chooser* button to display icons for the
-installed plugins:
+plugins by clicking the icons for your desired plugins in the left-hand navigation bar.
+
+[float]
+=== Disabling Plugins
-image::images/app-picker.png[]
+Use the following command to disable a plugin:
+
+[source,shell]
+./bin/kibana --<plugin ID>.enabled=false
-Click a plugin's icon to switch to that plugin's functionality.
+You can find a plugin's plugin ID as the value of the `name` property in the plugin's `package.json` file.
\ No newline at end of file
diff --git a/docs/production.asciidoc b/docs/production.asciidoc
index 13eaffc29ec1b..f911770e236c6 100644
--- a/docs/production.asciidoc
+++ b/docs/production.asciidoc
@@ -1,6 +1,6 @@
[[production]]
== Using Kibana in a Production Environment
-* <>
+* <>
* <>
* <>
* <>
@@ -19,31 +19,29 @@ and an Elasticsearch client node on the same machine. For more information, see
[float]
[[configuring-kibana-shield]]
-=== Configuring Kibana to Work with Shield
-If you are using Shield to authenticate Elasticsearch users, you need to provide
+=== Configuring Kibana to Work with {scyld}
+If you are using {scyld} to authenticate Elasticsearch users, you need to provide
the Kibana server with credentials so it can access the `.kibana` index and monitor
the cluster.
To configure credentials for the Kibana server:
-. Assign the `kibana4_server` role to a user in Shield. For more information, see
-{shield}/kibana.html#kibana4-server-role[Configuring a Role for the Kibana 4 Server]
-in the Shield documentation.
+. Assign the `kibana4_server` role to a user in {scyld}. For more information, see
+{shield}/kibana.html#kibana4-server-role[Configuring a Role for the Kibana Server]
+in the {scyld} documentation.
. Set the `kibana_elasticsearch_username` and
`kibana_elasticsearch_password` properties in `kibana.yml` to specify the credentials
-of the user you assigned the `kibana4_server`
-role:
+of the user you assigned the `kibana4_server` role:
+
[source,text]
----
-kibana_elasticsearch_username: kibana4-user
-kibana_elasticsearch_password: kibana4-password
+kibana_elasticsearch_username: kibana-user
+kibana_elasticsearch_password: kibana-password
----
-Kibana 4 users also need access to the `.kibana` index so they can save and load searches, visualizations, and dashboards.
-For more information, see {shield}/kibana.html#kibana4-server-role[Configuring Roles for Kibana 4 Users] in
-the Shield documentation.
+Users on release 4.0 and later of Kibana also need access to the `.kibana` index so they can save and load searches, visualizations, and
+dashboards. For more information, see {shield}/kibana.html[Using Kibana with {scyld}].
TIP: See <> for important information on Kibana and
the dynamic mapping feature in Elasticsearch.
@@ -64,7 +62,7 @@ server.ssl.key: /path/to/your/server.key
server.ssl.cert: /path/to/your/server.crt
----
-If you are using Shield or a proxy that provides an HTTPS endpoint for Elasticsearch,
+If you are using {scyld} or a proxy that provides an HTTPS endpoint for Elasticsearch,
you can configure Kibana to access Elasticsearch via HTTPS so communications between
the Kibana server and Elasticsearch are encrypted.
@@ -89,15 +87,14 @@ ca: /path/to/your/ca/cacert.pem
[float]
[[controlling-access]]
=== Controlling access
-You can use http://www.elastic.co/overview/shield/[Elasticsearch Shield]
-(Shield) to control what Elasticsearch data users can access through Kibana.
-Shield provides index-level access control. If a user isn't authorized to run
+You can use http://www.elastic.co/overview/shield/[{scyld}] to control what Elasticsearch data users can access through Kibana.
+{scyld} provides index-level access control. If a user isn't authorized to run
the query that populates a Kibana visualization, the user just sees an empty
visualization.
-To configure access to Kibana using Shield, you create Shield roles
+To configure access to Kibana using {scyld}, you create roles
for Kibana using the `kibana4` default role as a starting point. For more
-information, see {shield}/kibana.html#using-kibana4-with-shield[Using Kibana 4 with Shield].
+information, see {shield}/kibana.html#using-kibana4-with-shield[Using Kibana with {scyld}].
[float]
[[load-balancing]]
diff --git a/docs/releasenotes.asciidoc b/docs/releasenotes.asciidoc
index 0e7065d0c749f..a9e965bd22dfb 100644
--- a/docs/releasenotes.asciidoc
+++ b/docs/releasenotes.asciidoc
@@ -1,27 +1,31 @@
[[releasenotes]]
-== Kibana 4.3 Release Notes
+== Kibana {version} Release Notes
-The 4.3 release of Kibana requires Elasticsearch 2.1 or later.
+The {version} release of Kibana requires Elasticsearch {esversion} or later.
-Using event times to create index names is *deprecated* in this release of Kibana. Support for this functionality will be
-removed entirely in the next major Kibana release. Elasticsearch 2.1 includes sophisticated date parsing APIs that Kibana
-uses to determine date information, removing the need to specify dates in the index pattern name.
+[float]
+[[breaking]]
+== Breaking Changes
+
+// * {k4issue}5591[Issue 5591]: The command-line plugin tool no longer supports Github.
[float]
[[enhancements]]
== Enhancements
-* {k4issue}5109[Issue 5109]: Adds custom JSON and filter alias naming for filters.
-* {k4issue}1726[Issue 1726]: Adds a color field formatter for value ranges in numeric fields.
-* {k4issue}4342[Issue 4342]: Increased performance for wildcard indices.
-* {k4issue}1600[Issue 1600]: Support for global time zones.
-* {k4pull}5275[Pull Request 5275]: Highlighting values in Discover can now be disabled.
-* {k4issue}5212[Issue 5212]: Adds support for multiple certificate authorities.
-* {k4issue}2716[Issue 2716]: The open/closed position of the spy panel now persists across UI state changes.
+// * {k4issue}6387[Issue 6387]: A new look for Kibana: new logo and UI, improved layout and navigation.
[float]
[[bugfixes]]
== Bug Fixes
-* {k4issue}5165[Issue 5165]: Resolves a display error in embedded views.
-* {k4issue}5021[Issue 5021]: Improves visualization dimming for dashboards with auto-refresh.
+// * {k4issue}5914[Issue 5914]: Adds ability to aggregate on unindexed fields.
+
+[float]
+[[plugins-apis]]
+== Plugins, APIs, and Development Infrastructure
+
+NOTE: The items in this section are not a complete list of the internal changes relating to development in Kibana. Plugin
+framework and APIs are not formally documented and not guaranteed to be backward compatible from release to release.
+
+// * {k4issue}5198[Issue 5198]: Injects buttons from the plugin to the navigation bar.
diff --git a/docs/settings.asciidoc b/docs/settings.asciidoc
index 4a89fc38fa514..1b99bec2a9307 100644
--- a/docs/settings.asciidoc
+++ b/docs/settings.asciidoc
@@ -35,11 +35,17 @@ list.
contains time-based events* option and select the index field that contains the timestamp. Kibana reads the index
mapping to list all of the fields that contain a timestamp.
+. By default, Kibana restricts wildcard expansion of time-based index patterns to indices with data within the currently
+selected time range. Click *Do not expand index pattern when searching* to disable this behavior.
+
. Click *Create* to add the index pattern.
. To designate the new pattern as the default pattern to load when you view the Discover tab, click the *favorite*
button.
+NOTE: When you define an index pattern, indices that match that pattern must exist in Elasticsearch. Those indices must
+contain data.
+
To use an event time in an index name, enclose the static text in the pattern and specify the date format using the
tokens described in the following table.
@@ -177,11 +183,13 @@ include::string-formatter.asciidoc[]
==== Numeric Field Formatters
-Numeric fields support the `Url`, `String`, `Bytes`, `Number`, `Percentage`, and `Color` formatters.
+Numeric fields support the `Url`, `Bytes`, `Duration`, `Number`, `Percentage`, `String`, and `Color` formatters.
+
+include::url-formatter.asciidoc[]
include::string-formatter.asciidoc[]
-include::url-formatter.asciidoc[]
+include::duration-formatter.asciidoc[]
include::color-formatter.asciidoc[]
@@ -195,6 +203,8 @@ Scripted fields compute data on the fly from the data in your Elasticsearch indi
the Discover tab as part of the document data, and you can use scripted fields in your visualizations.
Scripted field values are computed at query time so they aren't indexed and cannot be searched.
+NOTE: Kibana cannot query scripted fields.
+
WARNING: Computing data on the fly with scripted fields can be very resource intensive and can have a direct impact on
Kibana's performance. Keep in mind that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated data.
@@ -268,6 +278,9 @@ The Kibana server reads properties from the `kibana.yml` file on startup. The de
on `localhost:5601`. To change the host or port number, or connect to Elasticsearch running on a different machine,
you'll need to update your `kibana.yml` file. You can also enable SSL and set a variety of other options.
+include::kibana-yml.asciidoc[]
+
+////
deprecated[4.2, The names of several Kibana server properties changed in the 4.2 release of Kibana. The previous names remain as functional aliases, but are now deprecated and will be removed in a future release of Kibana]
[horizontal]
@@ -363,6 +376,10 @@ deprecated[4.2, The names of several Kibana server properties changed in the 4.2
+
*default*: `500000`
+`elasticsearch.requestHeadersWhitelist:` added[5.0]:: List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side headers, set this value to [] (an empty list).
++
+*default*: `[ 'authorization' ]`
+
`elasticsearch.shardTimeout` added[4.2]:: How long Elasticsearch should wait for responses from shards. Set to 0 to disable.
+
*alias*: `shard_timeout` deprecated[4.2]
@@ -397,6 +414,7 @@ you are using a self-signed certificate so the certificate can be verified. Disa
`logging.dest` added[4.2]:: The location where you want to store the Kibana's log output. If not specified, log output is written to standard output and not stored. Specifying a log file suppresses log writes to standard output.
+
*alias*: `log_file` deprecated[4.2]
+////
[[managing-saved-objects]]
=== Managing Saved Searches, Visualizations, and Dashboards
@@ -447,10 +465,12 @@ To export a set of objects:
. Click the selection box for the objects you want to export, or click the *Select All* box.
. Click *Export* to select a location to write the exported JSON.
+WARNING: Exported dashboards do not include their associated index patterns. Re-create the index patterns manually before
+importing saved dashboards to a Kibana instance running on another Elasticsearch cluster.
+
To import a set of objects:
. Go to *Settings > Objects*.
. Click *Import* to navigate to the JSON file representing the set of objects to import.
. Click *Open* after selecting the JSON file.
. If any objects in the set would overwrite objects already present in Kibana, confirm the overwrite.
-
diff --git a/docs/setup.asciidoc b/docs/setup.asciidoc
index 60a3124640bc9..74b87345bfb21 100644
--- a/docs/setup.asciidoc
+++ b/docs/setup.asciidoc
@@ -3,14 +3,14 @@
You can set up Kibana and start exploring your Elasticsearch indices in minutes.
All you need is:
-* Elasticsearch 2.1 or later
+* Elasticsearch {esversion}
* A modern web browser - http://www.elastic.co/subscriptions/matrix#matrix_browsers[Supported Browsers].
* Information about your Elasticsearch installation:
** URL of the Elasticsearch instance you want to connect to.
** Which Elasticsearch indices you want to search.
-NOTE: If your Elasticsearch installation is protected by http://www.elastic.co/overview/shield/[Shield] see
-{shield}/kibana.html#using-kibana4-with-shield[Shield with Kibana 4] for additional setup instructions.
+NOTE: If your Elasticsearch installation is protected by http://www.elastic.co/overview/shield/[{scyld}], see
+{shield}/kibana.html#using-kibana4-with-shield[{scyld} with Kibana] for additional setup instructions.
[float]
[[install]]
@@ -18,15 +18,16 @@ NOTE: If your Elasticsearch installation is protected by http://www.elastic.co/o
To get Kibana up and running:
-. Download the https://www.elastic.co/downloads/kibana[Kibana 4 binary package] for your platform.
+. Download the https://www.elastic.co/downloads/kibana[Kibana {version} binary package] for your platform.
. Extract the `.zip` or `tar.gz` archive file.
+. After installing, run Kibana from the install directory: `bin/kibana` (Linux/MacOSX) or `bin\kibana.bat` (Windows).
-// On Unix, you can instead run the package manager suited for your distribution.
-//
-// [float]
-// include::kibana-repositories.asciidoc[]
-//
-After installing, run Kibana from the install directory: `bin/kibana` (Linux/MacOSX) or `bin\kibana.bat` (Windows).
+On Unix, you can instead run the package manager suited for your distribution.
+
+////
+[float]
+include::kibana-repositories.asciidoc[]
+////
That's it! Kibana is now running on port 5601.
@@ -82,14 +83,14 @@ simply be the name of a single index.
reads the index mapping to list all of the fields that contain a timestamp. If your index doesn't have time-based data,
disable the *Index contains time-based events* option.
+
-WARNING: Using event times to create index names is *deprecated* in this release of Kibana. Support for this functionality
-will be removed entirely in the next major Kibana release. Elasticsearch 2.1 includes sophisticated date parsing APIs that
-Kibana uses to determine date information, removing the need to specify dates in the index pattern name.
+WARNING: Using event times to create index names is *deprecated* in this release of Kibana. Starting in the 2.1
+release, Elasticsearch includes sophisticated date parsing APIs that Kibana uses to determine date information,
+removing the need to specify dates in the index pattern name.
+
. Click *Create* to add the index pattern. This first pattern is automatically configured as the default.
When you have more than one index pattern, you can designate which one to use as the default from *Settings > Indices*.
-Voila! Kibana is now connected to your Elasticsearch data. Kibana displays a read-only list of fields configured for
+All done! Kibana is now connected to your Elasticsearch data. Kibana displays a read-only list of fields configured for
the matching index.
[float]
@@ -101,5 +102,5 @@ You're ready to dive in to your data:
* Chart and map your data from the <> page.
* Create and view custom dashboards from the <> page.
-For a brief tutorial that explores these core Kibana concepts, take a look at the <> page.
diff --git a/docs/string-formatter.asciidoc b/docs/string-formatter.asciidoc
index 0cde079122ba4..63232c0b7fbea 100644
--- a/docs/string-formatter.asciidoc
+++ b/docs/string-formatter.asciidoc
@@ -2,9 +2,10 @@ The `String` field formatter can apply the following transformations to the fiel
* Convert to lowercase
* Convert to uppercase
+* Convert to title case
* Apply the short dots transformation, which replaces the content before a `.` character with the first character of
that content, as in the following example:
[horizontal]
*Original*:: *Becomes*
-`com.organizations.project.ClassName`:: `c.o.p.ClassName`
\ No newline at end of file
+`com.organizations.project.ClassName`:: `c.o.p.ClassName`
diff --git a/docs/tilemap.asciidoc b/docs/tilemap.asciidoc
index 804abea1219b6..8bcf21cdf5c53 100644
--- a/docs/tilemap.asciidoc
+++ b/docs/tilemap.asciidoc
@@ -19,6 +19,8 @@ numeric field. Select a field from the drop-down.
*Unique Count*:: The {ref}search-aggregations-metrics-cardinality-aggregation.html[_cardinality_] aggregation returns
the number of unique values in a field. Select a field from the drop-down.
+Enter a string in the *Custom Label* field to change the display label.
+
The _buckets_ aggregations determine what information is being retrieved from your data set.
Before you choose a buckets aggregation, specify if you are splitting the chart or displaying the buckets as *Geo
@@ -48,7 +50,7 @@ intervals in the histogram.
*Range*:: With a {ref}search-aggregations-bucket-range-aggregation.html[_range_] aggregation, you can specify ranges
of values for a numeric field. Click *Add Range* to add a set of range endpoints. Click the red *(x)* symbol to remove
a range.
-After changing options, click the green *Apply changes* button to update your visualization, or the grey *Discard
+After changing options, click the *Apply changes* button to update your visualization, or the grey *Discard
changes* button to keep your visualization in its current state.
*Date Range*:: A {ref}search-aggregations-bucket-daterange-aggregation.html[_date range_] aggregation reports values
that are within a range of dates that you specify. You can specify the ranges for the dates using
@@ -71,12 +73,12 @@ based on the geohash coordinates.
NOTE: By default, the *Change precision on map zoom* box is checked. Uncheck the box to disable this behavior.
+Enter a string in the *Custom Label* field to change the display label.
+
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
-*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
-*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:
@@ -119,7 +121,7 @@ in the lower right corner.
* *WMS styles*: A comma-separated list of the styles to use in this visualization. Each map server provides its own styling
options.
-After changing options, click the green *Apply changes* button to update your visualization, or the grey *Discard
+After changing options, click the *Apply changes* button to update your visualization, or the grey *Discard
changes* button to keep your visualization in its current state.
[float]
diff --git a/docs/vertbar.asciidoc b/docs/vertbar.asciidoc
index d14b31fbff99d..e98362e964723 100644
--- a/docs/vertbar.asciidoc
+++ b/docs/vertbar.asciidoc
@@ -26,6 +26,8 @@ values field. Click *+Add* to add a values field.
You can add an aggregation by clicking the *+ Add Aggregation* button.
+Enter a string in the *Custom Label* field to change the display label.
+
The _buckets_ aggregations determine what information is being retrieved from your data set.
Before you choose a buckets aggregation, specify if you are splitting slices within a single chart or splitting into
@@ -34,12 +36,14 @@ if the splits are displayed in a row or a column by clicking the *Rows | Columns
include::x-axis-aggs.asciidoc[]
+include::color-picker.asciidoc[]
+
+Enter a string in the *Custom Label* field to change the display label.
+
You can click the *Advanced* link to display more customization options for your metrics or bucket aggregation:
*Exclude Pattern*:: Specify a pattern in this field to exclude from the results.
-*Exclude Pattern Flags*:: A standard set of Java flags for the exclusion pattern.
*Include Pattern*:: Specify a pattern in this field to include in the results.
-*Include Pattern Flags*:: A standard set of Java flags for the inclusion pattern.
*JSON Input*:: A text field where you can add specific JSON-formatted properties to merge with the aggregation
definition, as in the following example:
diff --git a/docs/visualize.asciidoc b/docs/visualize.asciidoc
index 780e182b5bf52..c598b2ea47acc 100644
--- a/docs/visualize.asciidoc
+++ b/docs/visualize.asciidoc
@@ -1,8 +1,8 @@
[[visualize]]
== Visualize
-You can use the _Visualize_ page to design data visualizations. You can save these visualizations, use
-them individually, or combine visualizations into a _dashboard_. A visualization can be based on one of the following
+You can use the _Visualize_ page to design data visualizations. You can save these visualizations, use them
+individually, or combine visualizations into a _dashboard_. A visualization can be based on one of the following
data source types:
* A new interactive search
@@ -15,9 +15,9 @@ Visualizations are based on the {ref}search-aggregations.html[aggregation] featu
[[createvis]]
=== Creating a New Visualization
-To start the New Visualization wizard, click on the *Visualize* tab at the top left of the page. If you are already
-creating a visualization, you can click the *New Visualization* button image:images/K4NewDocument.png[New Document
-button] in the toolbar to the right of the search bar. The wizard guides you through the following steps:
+Click on the *Visualize* image:images/visualize-icon.png[chart icon] tab in the left-hand navigation bar. If you are
+already creating a visualization, you can click the *New* button in the toolbar. To set up your visualization, follow
+these steps:
[float]
[[newvis01]]
@@ -38,8 +38,8 @@ dashboard.
<>:: Use vertical bar charts as a general-purpose chart.
You can also load a saved visualization that you created earlier. The saved visualization selector includes a text
-field to filter by visualization name and a link to the Object Editor, accessible through *Settings > Edit Saved
-Objects*, to manage your saved visualizations.
+field to filter by visualization name and a link to the Object Editor, accessible through *Settings > Objects*, to
+manage your saved visualizations.
If your new visualization is a Markdown widget, selecting that type takes you to a text entry field where you enter the
text to display in the widget. For all other types of visualization, selecting the type takes you to data source
@@ -128,7 +128,7 @@ inside each bucket, which in this example is a one-hour interval.
NOTE: Remember, each subsequent bucket slices the data from the previous bucket.
-To render the visualization on the _preview canvas_, click the green *Apply Changes* button at the top right of the
+To render the visualization on the _preview canvas_, click the *Apply Changes* button at the top right of the
Aggregation Builder.
You can learn more about aggregation and how altering the order of aggregations affects your visualizations
@@ -143,7 +143,7 @@ include::filter-pinning.asciidoc[]
===== Preview Canvas
The preview canvas displays a preview of the visualization you've defined in the aggregation builder. To refresh the
-visualization preview, clicking the *Refresh* button image:images/K4Refresh.png[Refresh button] on the toolbar.
+visualization preview, click the *Apply Changes* image:images/apply-changes-button.png[] button on the toolbar.
include::area.asciidoc[]
diff --git a/docs/x-axis-aggs.asciidoc b/docs/x-axis-aggs.asciidoc
index a39eb8a48a20b..3b9fd3e948623 100644
--- a/docs/x-axis-aggs.asciidoc
+++ b/docs/x-axis-aggs.asciidoc
@@ -28,8 +28,8 @@ remove a range.
or bottom _n_ elements of a given field to display, ordered by count or a custom metric.
*Filters*:: You can specify a set of {ref}/search-aggregations-bucket-filters-aggregation.html[_filters_] for the data.
You can specify a filter as a query string or in JSON format, just as in the Discover search bar. Click *Add Filter* to
-add another filter. Click the images:labelbutton.png[] *label* button to open the label field, where you can type in a
-name to display on the visualization.
+add another filter. Click the image:images/labelbutton.png[Label button icon] *label* button to open the label field, where
+you can type in a name to display on the visualization.
*Significant Terms*:: Displays the results of the experimental
{ref}/search-aggregations-bucket-significantterms-aggregation.html[_significant terms_] aggregation.
@@ -39,3 +39,5 @@ from the list of types.
When multiple aggregations are defined on a chart's axis, you can use the up or down arrows to the right of the
aggregation's type to change the aggregation's priority.
+
+Enter a string in the *Custom Label* field to change the display label.
diff --git a/docs/y-axis-aggs.asciidoc b/docs/y-axis-aggs.asciidoc
index 18029bb0904f5..f404abb42dbbb 100644
--- a/docs/y-axis-aggs.asciidoc
+++ b/docs/y-axis-aggs.asciidoc
@@ -21,4 +21,6 @@ aggregation returns the percentile rankings for the values in the numeric field
from the drop-down, then specify one or more percentile rank values in the *Values* fields. Click the *X* to remove a
values field. Click *+Add* to add a values field.
-You can add an aggregation by clicking the *+ Add Aggregation* button.
+You can add an aggregation by clicking the *+ Add Metrics* button.
+
+Enter a string in the *Custom Label* field to change the display label.
diff --git a/package.json b/package.json
index 82d78b0300578..68b3a4542d782 100644
--- a/package.json
+++ b/package.json
@@ -11,12 +11,11 @@
"dashboarding"
],
"private": false,
- "version": "4.4.0-snapshot",
+ "version": "5.0.0",
"build": {
"number": 8467,
"sha": "6cb7fec4e154faa0a4a3fee4b33dfef91b9870d9"
},
- "main": "src/server/KbnServer.js",
"homepage": "https://www.elastic.co/products/kibana",
"bugs": {
"url": "http://github.com/elastic/kibana/issues"
@@ -24,21 +23,22 @@
"license": "Apache-2.0",
"author": "Rashid Khan ",
"contributors": [
- "Spencer Alger ",
- "Matt Bargar ",
- "Jon Budzenski ",
"Chris Cowan ",
"Court Ewing ",
+ "Jim Unger ",
"Joe Fleming ",
+ "Jon Budzenski ",
+ "Juan Thomassie ",
"Khalah Jones-Golden ",
"Lukas Olson ",
- "Juan Thomassie ",
+ "Matt Bargar ",
+ "Nicolás Bevacqua ",
"Shelby Sturgis ",
- "Tim Sullivan ",
- "Jim Unger "
+ "Spencer Alger ",
+ "Tim Sullivan "
],
"scripts": {
- "test": "grunt test",
+ "test": "grunt test; grunt test:visualRegression",
"test:dev": "grunt test:dev",
"test:quick": "grunt test:quick",
"test:browser": "grunt test:browser",
@@ -48,23 +48,31 @@
"test:server": "grunt test:server",
"test:coverage": "grunt test:coverage",
"build": "grunt build",
- "start": "./bin/kibana --dev",
- "precommit": "grunt lintStagedFiles",
+ "start": "sh ./bin/kibana --dev",
+ "precommit": "grunt precommit",
"karma": "karma start",
"elasticsearch": "grunt esvm:dev:keepalive",
+ "elasticsearchWithPlugins": "grunt esvm:withPlugins:keepalive",
"lint": "grunt eslint:source",
- "lintroller": "grunt eslint:fixSource"
+ "lintroller": "grunt eslint:fixSource",
+ "makelogs": "makelogs",
+ "mocha": "mocha",
+ "mocha:debug": "mocha --debug-brk",
+ "sterilize": "grunt sterilize"
},
"repository": {
"type": "git",
"url": "https://github.com/elastic/kibana.git"
},
"dependencies": {
+ "@bigfunger/decompress-zip": "0.2.0-stripfix2",
+ "@elastic/datemath": "2.2.0",
"@spalger/angular-bootstrap": "0.12.1",
"@spalger/filesaver": "1.1.2",
"@spalger/leaflet-draw": "0.2.3",
"@spalger/leaflet-heat": "0.1.3",
"@spalger/numeral": "^2.0.0",
+ "@spalger/test-subj-selector": "0.2.1",
"@spalger/ui-ace": "0.2.3",
"angular": "1.4.7",
"angular-bootstrap-colorpicker": "3.0.19",
@@ -73,30 +81,34 @@
"ansicolors": "0.3.2",
"autoprefixer": "5.1.1",
"autoprefixer-loader": "2.0.0",
- "babel": "5.8.23",
- "babel-core": "5.8.23",
+ "babel": "5.8.38",
+ "babel-core": "5.8.38",
"babel-loader": "5.3.2",
- "babel-runtime": "5.8.20",
+ "babel-runtime": "5.8.38",
"bluebird": "2.9.34",
"boom": "2.8.0",
- "bootstrap": "3.3.5",
+ "bootstrap": "3.3.6",
"brace": "0.5.1",
- "bunyan": "1.4.0",
+ "bunyan": "1.7.1",
+ "clipboard": "1.5.5",
"commander": "2.8.1",
"css-loader": "0.17.0",
"d3": "3.5.6",
- "elasticsearch": "8.0.1",
- "elasticsearch-browser": "8.0.1",
+ "dragula": "3.7.0",
+ "elasticsearch": "10.1.2",
+ "elasticsearch-browser": "10.1.2",
+ "even-better": "7.0.2",
"expiry-js": "0.1.7",
"exports-loader": "0.6.2",
"expose-loader": "0.7.0",
"extract-text-webpack-plugin": "0.8.2",
"file-loader": "0.8.4",
"font-awesome": "4.4.0",
- "good": "6.3.0",
+ "glob-all": "3.0.1",
"good-squeeze": "2.1.0",
"gridster": "0.5.6",
"hapi": "8.8.1",
+ "httpolyglot": "0.1.1",
"imports-loader": "0.6.4",
"jade": "1.11.0",
"jade-loader": "0.7.1",
@@ -113,39 +125,45 @@
"marked": "0.3.3",
"minimatch": "2.0.10",
"mkdirp": "0.5.1",
- "moment": "2.10.6",
- "moment-timezone": "0.4.1",
+ "moment": "2.13.0",
+ "moment-timezone": "0.5.4",
+ "node-uuid": "1.4.7",
"raw-loader": "0.5.1",
"request": "2.61.0",
- "requirefrom": "0.2.0",
"rimraf": "2.4.3",
+ "rison-node": "1.0.0",
"rjs-repack-loader": "1.0.6",
"script-loader": "0.6.1",
- "semver": "4.3.6",
+ "semver": "5.1.0",
"style-loader": "0.12.3",
"tar": "2.2.0",
"url-loader": "0.5.6",
+ "validate-npm-package-name": "2.2.2",
"webpack": "1.12.1",
"webpack-directory-name-as-main": "1.0.0",
- "whatwg-fetch": "0.9.0"
+ "whatwg-fetch": "0.9.0",
+ "wreck": "6.2.0"
},
"devDependencies": {
+ "@elastic/eslint-config-kibana": "0.0.3",
"Nonsense": "0.1.2",
"angular-mocks": "1.4.7",
"auto-release-sinon": "1.0.3",
- "babel-eslint": "4.1.3",
- "chokidar": "1.0.5",
- "eslint": "1.5.1",
- "eslint-plugin-mocha": "1.0.0",
+ "babel-eslint": "4.1.8",
+ "chokidar": "1.4.3",
+ "dot": "1.0.3",
+ "elasticdump": "2.1.1",
+ "eslint": "1.10.3",
+ "eslint-plugin-mocha": "1.1.0",
+ "event-stream": "3.3.2",
"expect.js": "0.3.1",
"faker": "1.1.0",
- "glob": "4.5.3",
"grunt": "0.4.5",
"grunt-babel": "5.0.1",
"grunt-cli": "0.1.13",
"grunt-contrib-clean": "0.6.0",
"grunt-contrib-copy": "0.8.1",
- "grunt-esvm": "1.1.8",
+ "grunt-esvm": "3.2.1",
"grunt-karma": "0.12.0",
"grunt-run": "0.5.0",
"grunt-s3": "0.2.0-alpha.3",
@@ -153,31 +171,33 @@
"gruntify-eslint": "1.0.1",
"html-entities": "1.1.3",
"husky": "0.8.1",
+ "image-diff": "1.6.0",
"intern": "3.0.1",
"istanbul-instrumenter-loader": "0.1.3",
"karma": "0.13.9",
"karma-chrome-launcher": "0.2.0",
"karma-coverage": "0.5.1",
"karma-firefox-launcher": "0.1.6",
- "karma-growl-reporter": "0.1.1",
"karma-ie-launcher": "0.2.0",
"karma-mocha": "0.2.0",
"karma-safari-launcher": "0.1.1",
- "libesvm": "1.0.7",
"license-checker": "3.1.0",
- "load-grunt-config": "0.7.2",
+ "load-grunt-config": "0.19.1",
+ "makelogs": "3.0.0-beta3",
"marked-text-renderer": "0.1.0",
"mocha": "2.3.0",
+ "ncp": "2.0.0",
"nock": "2.10.0",
"npm": "2.11.0",
"portscanner": "1.0.0",
- "simple-git": "1.8.0",
+ "simple-git": "1.37.0",
"sinon": "1.17.2",
"source-map": "0.4.4",
- "wreck": "6.2.0"
+ "source-map-support": "0.4.0",
+ "supertest-as-promised": "2.0.2"
},
"engines": {
- "node": "0.12",
- "npm": "2.14.3"
+ "node": "4.4.4",
+ "npm": "2.15.1"
}
}
diff --git a/src/cli/Command.js b/src/cli/Command.js
deleted file mode 100644
index 6f1e6b98952e1..0000000000000
--- a/src/cli/Command.js
+++ /dev/null
@@ -1,95 +0,0 @@
-let _ = require('lodash');
-let Command = require('commander').Command;
-
-let red = require('./color').red;
-let yellow = require('./color').yellow;
-let help = require('./help');
-
-Command.prototype.error = function (err) {
- if (err && err.message) err = err.message;
-
- console.log(
-`
-${red(' ERROR ')} ${err}
-
-${help(this, ' ')}
-`
- );
-
- process.exit(64); // eslint-disable-line no-process-exit
-};
-
-Command.prototype.defaultHelp = function () {
- console.log(
-`
-${help(this, ' ')}
-
-`
- );
-
- process.exit(64); // eslint-disable-line no-process-exit
-};
-
-Command.prototype.unknownArgv = function (argv) {
- if (argv) this.__unknownArgv = argv;
- return this.__unknownArgv ? this.__unknownArgv.slice(0) : [];
-};
-
-/**
- * setup the command to accept arbitrary configuration via the cli
- * @return {[type]} [description]
- */
-Command.prototype.collectUnknownOptions = function () {
- let title = `Extra ${this._name} options`;
-
- this.allowUnknownOption();
- this.getUnknownOptions = function () {
- let opts = {};
- let unknowns = this.unknownArgv();
-
- while (unknowns.length) {
- let opt = unknowns.shift().split('=');
- if (opt[0].slice(0, 2) !== '--') {
- this.error(`${title} "${opt[0]}" must start with "--"`);
- }
-
- if (opt.length === 1) {
- if (!unknowns.length || unknowns[0][0] === '-') {
- this.error(`${title} "${opt[0]}" must have a value`);
- }
-
- opt.push(unknowns.shift());
- }
-
- let val = opt[1];
- try { val = JSON.parse(opt[1]); }
- catch (e) { val = opt[1]; }
-
- _.set(opts, opt[0].slice(2), val);
- }
-
- return opts;
- };
-
- return this;
-};
-
-Command.prototype.parseOptions = _.wrap(Command.prototype.parseOptions, function (parse, argv) {
- let opts = parse.call(this, argv);
- this.unknownArgv(opts.unknown);
- return opts;
-});
-
-Command.prototype.action = _.wrap(Command.prototype.action, function (action, fn) {
- return action.call(this, function (...args) {
- var ret = fn.apply(this, args);
- if (ret && typeof ret.then === 'function') {
- ret.then(null, function (e) {
- console.log('FATAL CLI ERROR', e.stack);
- process.exit(1);
- });
- }
- });
-});
-
-module.exports = Command;
diff --git a/src/cli/Log.js b/src/cli/Log.js
deleted file mode 100644
index 168c3873cb2cb..0000000000000
--- a/src/cli/Log.js
+++ /dev/null
@@ -1,16 +0,0 @@
-let _ = require('lodash');
-let ansicolors = require('ansicolors');
-
-let log = _.restParam(function (color, label, rest1) {
- console.log.apply(console, [color(` ${_.trim(label)} `)].concat(rest1));
-});
-
-let color = require('./color');
-
-module.exports = class Log {
- constructor(quiet, silent) {
- this.good = quiet || silent ? _.noop : _.partial(log, color.green);
- this.warn = quiet || silent ? _.noop : _.partial(log, color.yellow);
- this.bad = silent ? _.noop : _.partial(log, color.red);
- }
-};
diff --git a/src/cli/cli.js b/src/cli/cli.js
index 25cbf536b8bc6..f25608dec1cf7 100644
--- a/src/cli/cli.js
+++ b/src/cli/cli.js
@@ -1,11 +1,10 @@
-let _ = require('lodash');
+import _ from 'lodash';
+import pkg from '../utils/package_json';
+import Command from './command';
+import serveCommand from './serve/serve';
-let utils = require('requirefrom')('src/utils');
-let pkg = utils('packageJson');
-let Command = require('./Command');
-
-let argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
-let program = new Command('bin/kibana');
+const argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
+const program = new Command('bin/kibana');
program
.version(pkg.version)
@@ -15,15 +14,14 @@ program
);
// attach commands
-require('./serve/serve')(program);
-require('./plugin/plugin')(program);
+serveCommand(program);
program
.command('help ')
.description('Get the help for a specific command')
.action(function (cmdName) {
- var cmd = _.find(program.commands, { _name: cmdName });
- if (!cmd) return this.error(`unknown command ${cmdName}`);
+ const cmd = _.find(program.commands, { _name: cmdName });
+ if (!cmd) return program.error(`unknown command ${cmdName}`);
cmd.help();
});
@@ -34,7 +32,7 @@ program
});
// check for no command name
-var subCommand = argv[2] && !String(argv[2][0]).match(/^-|^\.|\//);
+const subCommand = argv[2] && !String(argv[2][0]).match(/^-|^\.|\//);
if (!subCommand) {
if (_.intersection(argv.slice(2), ['-h', '--help']).length) {
diff --git a/src/cli/cluster/ClusterManager.js b/src/cli/cluster/ClusterManager.js
deleted file mode 100644
index 5bce6a5832c66..0000000000000
--- a/src/cli/cluster/ClusterManager.js
+++ /dev/null
@@ -1,126 +0,0 @@
-let cluster = require('cluster');
-let { join } = require('path');
-let { debounce, compact, invoke, bindAll, once } = require('lodash');
-
-let Log = require('../Log');
-let Worker = require('./Worker');
-
-module.exports = class ClusterManager {
- constructor(opts) {
- this.log = new Log(opts.quiet, opts.silent);
- this.addedCount = 0;
-
- this.workers = [
- this.optimizer = new Worker({
- type: 'optmzr',
- title: 'optimizer',
- log: this.log,
- argv: compact([
- '--plugins.initialize=false',
- '--server.autoListen=false'
- ]),
- watch: false
- }),
-
- this.server = new Worker({
- type: 'server',
- log: this.log
- })
- ];
-
- // broker messages between workers
- this.workers.forEach((worker) => {
- worker.on('broadcast', (msg) => {
- this.workers.forEach((to) => {
- if (to !== worker && to.online) {
- to.fork.send(msg);
- }
- });
- });
- });
-
- bindAll(this, 'onWatcherAdd', 'onWatcherError', 'onWatcherChange');
-
- if (opts.watch) this.setupWatching();
- else this.startCluster();
- }
-
- startCluster() {
- this.setupManualRestart();
- invoke(this.workers, 'start');
- }
-
- setupWatching() {
- var chokidar = require('chokidar');
- let utils = require('requirefrom')('src/utils');
- let fromRoot = utils('fromRoot');
-
- this.watcher = chokidar.watch([
- 'src/plugins',
- 'src/server',
- 'src/ui',
- 'src/utils',
- 'config',
- 'installedPlugins'
- ], {
- cwd: fromRoot('.'),
- ignored: /[\\\/](\..*|node_modules|bower_components|public|__tests__)[\\\/]/
- });
-
- this.watcher.on('add', this.onWatcherAdd);
- this.watcher.on('error', this.onWatcherError);
-
- this.watcher.on('ready', once(() => {
- // start sending changes to workers
- this.watcher.removeListener('add', this.onWatcherAdd);
- this.watcher.on('all', this.onWatcherChange);
-
- this.log.good('watching for changes', `(${this.addedCount} files)`);
- this.startCluster();
- }));
- }
-
- setupManualRestart() {
- let readline = require('readline');
- let rl = readline.createInterface(process.stdin, process.stdout);
-
- let nls = 0;
- let clear = () => nls = 0;
- let clearSoon = debounce(clear, 2000);
-
- rl.setPrompt('');
- rl.prompt();
-
- rl.on('line', line => {
- nls = nls + 1;
-
- if (nls >= 2) {
- clearSoon.cancel();
- clear();
- this.server.start();
- } else {
- clearSoon();
- }
-
- rl.prompt();
- });
-
- rl.on('SIGINT', () => {
- rl.pause();
- process.kill(process.pid, 'SIGINT');
- });
- }
-
- onWatcherAdd() {
- this.addedCount += 1;
- }
-
- onWatcherChange(e, path) {
- invoke(this.workers, 'onChange', path);
- }
-
- onWatcherError(err) {
- this.log.bad('failed to watch files!\n', err.stack);
- process.exit(1); // eslint-disable-line no-process-exit
- }
-};
diff --git a/src/cli/cluster/Worker.js b/src/cli/cluster/Worker.js
deleted file mode 100644
index ab57e350a3285..0000000000000
--- a/src/cli/cluster/Worker.js
+++ /dev/null
@@ -1,121 +0,0 @@
-let _ = require('lodash');
-let cluster = require('cluster');
-let { resolve } = require('path');
-let { EventEmitter } = require('events');
-
-let fromRoot = require('../../utils/fromRoot');
-
-let cliPath = fromRoot('src/cli');
-let baseArgs = _.difference(process.argv.slice(2), ['--no-watch']);
-let baseArgv = [process.execPath, cliPath].concat(baseArgs);
-
-cluster.setupMaster({
- exec: cliPath,
- silent: false
-});
-
-let dead = fork => {
- return fork.isDead() || fork.killed;
-};
-
-let kill = fork => {
- // fork.kill() waits for process to disconnect, but causes occasional
- // "ipc disconnected" errors and is too slow for the proc's "exit" event
- fork.process.kill();
- fork.killed = true;
-};
-
-module.exports = class Worker extends EventEmitter {
- constructor(opts) {
- opts = opts || {};
- super();
-
- this.log = opts.log;
- this.type = opts.type;
- this.title = opts.title || opts.type;
- this.watch = (opts.watch !== false);
- this.startCount = 0;
- this.online = false;
- this.changes = [];
-
- let argv = _.union(baseArgv, opts.argv || []);
- this.env = {
- kbnWorkerType: this.type,
- kbnWorkerArgv: JSON.stringify(argv)
- };
-
- _.bindAll(this, ['onExit', 'onMessage', 'onOnline', 'onDisconnect', 'shutdown', 'start']);
-
- this.start = _.debounce(this.start, 25);
- cluster.on('exit', this.onExit);
- process.on('exit', this.shutdown);
- }
-
- onExit(fork, code) {
- if (this.fork !== fork) return;
-
- // our fork is gone, clear our ref so we don't try to talk to it anymore
- this.fork = null;
-
- if (code) {
- this.log.bad(`${this.title} crashed`, 'with status code', code);
- if (!this.watch) process.exit(code);
- } else {
- // restart after graceful shutdowns
- this.start();
- }
- }
-
- onChange(path) {
- if (!this.watch) return;
- this.changes.push(path);
- this.start();
- }
-
- shutdown() {
- if (this.fork && !dead(this.fork)) {
- kill(this.fork);
- this.fork.removeListener('message', this.onMessage);
- this.fork.removeListener('online', this.onOnline);
- this.fork.removeListener('disconnect', this.onDisconnect);
- }
- }
-
- onMessage(msg) {
- if (!_.isArray(msg) || msg[0] !== 'WORKER_BROADCAST') return;
- this.emit('broadcast', msg[1]);
- }
-
- onOnline() {
- this.online = true;
- }
-
- onDisconnect() {
- this.online = false;
- }
-
- flushChangeBuffer() {
- let files = _.unique(this.changes.splice(0));
- let prefix = files.length > 1 ? '\n - ' : '';
- return files.reduce(function (list, file) {
- return `${list || ''}${prefix}"${file}"`;
- }, '');
- }
-
- start() {
- // once "exit" event is received with 0 status, start() is called again
- if (this.fork) return this.shutdown();
-
- if (this.changes.length) {
- this.log.warn(`restarting ${this.title}`, `due to changes in ${this.flushChangeBuffer()}`);
- }
- else if (this.startCount++) {
- this.log.warn(`restarting ${this.title}...`);
- }
-
- this.fork = cluster.fork(this.env);
- this.fork.on('message', this.onMessage);
- this.fork.on('online', this.onOnline);
- this.fork.on('disconnect', this.onDisconnect);
- }
-};
diff --git a/src/cli/cluster/__tests__/_mock_cluster_fork.js b/src/cli/cluster/__tests__/_mock_cluster_fork.js
new file mode 100644
index 0000000000000..2671ca08bdb9a
--- /dev/null
+++ b/src/cli/cluster/__tests__/_mock_cluster_fork.js
@@ -0,0 +1,44 @@
+import EventEmitter from 'events';
+import { assign, random } from 'lodash';
+import sinon from 'sinon';
+import cluster from 'cluster';
+import { delay } from 'bluebird';
+
+export default class MockClusterFork extends EventEmitter {
+ constructor() {
+ super();
+
+ let dead = true;
+
+ function wait() {
+ return delay(random(10, 250));
+ }
+
+ assign(this, {
+ process: {
+ kill: sinon.spy(() => {
+ (async () => {
+ await wait();
+ this.emit('disconnect');
+ await wait();
+ dead = true;
+ this.emit('exit');
+ cluster.emit('exit', this, this.exitCode || 0);
+ }());
+ }),
+ },
+ isDead: sinon.spy(() => dead),
+ send: sinon.stub()
+ });
+
+ sinon.spy(this, 'on');
+ sinon.spy(this, 'removeListener');
+ sinon.spy(this, 'emit');
+
+ (async () => {
+ await wait();
+ dead = false;
+ this.emit('online');
+ }());
+ }
+}
diff --git a/src/cli/cluster/__tests__/cluster_manager.js b/src/cli/cluster/__tests__/cluster_manager.js
new file mode 100644
index 0000000000000..ae9ef7080981a
--- /dev/null
+++ b/src/cli/cluster/__tests__/cluster_manager.js
@@ -0,0 +1,59 @@
+import expect from 'expect.js';
+import sinon from 'auto-release-sinon';
+import cluster from 'cluster';
+import { ChildProcess } from 'child_process';
+import { sample, difference } from 'lodash';
+
+import ClusterManager from '../cluster_manager';
+import Worker from '../worker';
+
+describe('CLI cluster manager', function () {
+
+ function setup() {
+ sinon.stub(cluster, 'fork', function () {
+ return {
+ process: {
+ kill: sinon.stub(),
+ },
+ isDead: sinon.stub().returns(false),
+ removeListener: sinon.stub(),
+ on: sinon.stub(),
+ send: sinon.stub()
+ };
+ });
+
+ const manager = new ClusterManager({});
+ return manager;
+ }
+
+ it('has two workers', function () {
+ const manager = setup();
+
+ expect(manager.workers).to.have.length(2);
+ for (const worker of manager.workers) expect(worker).to.be.a(Worker);
+
+ expect(manager.optimizer).to.be.a(Worker);
+ expect(manager.server).to.be.a(Worker);
+ });
+
+ it('delivers broadcast messages to other workers', function () {
+ const manager = setup();
+
+ for (const worker of manager.workers) {
+ Worker.prototype.start.call(worker);// bypass the debounced start method
+ worker.onOnline();
+ }
+
+ const football = {};
+ const messenger = sample(manager.workers);
+
+ messenger.emit('broadcast', football);
+ for (const worker of manager.workers) {
+ if (worker === messenger) {
+ expect(worker.fork.send.callCount).to.be(0);
+ } else {
+ expect(worker.fork.send.firstCall.args[0]).to.be(football);
+ }
+ }
+ });
+});
diff --git a/src/cli/cluster/__tests__/worker.js b/src/cli/cluster/__tests__/worker.js
new file mode 100644
index 0000000000000..4d8f4f53af148
--- /dev/null
+++ b/src/cli/cluster/__tests__/worker.js
@@ -0,0 +1,198 @@
+import expect from 'expect.js';
+import sinon from 'auto-release-sinon';
+import cluster from 'cluster';
+import { ChildProcess } from 'child_process';
+import { difference, findIndex, sample } from 'lodash';
+import { fromNode as fn } from 'bluebird';
+
+import MockClusterFork from './_mock_cluster_fork';
+import Worker from '../worker';
+
+const workersToShutdown = [];
+
+function assertListenerAdded(emitter, event) {
+ sinon.assert.calledWith(emitter.on, event);
+}
+
+function assertListenerRemoved(emitter, event) {
+ sinon.assert.calledWith(
+ emitter.removeListener,
+ event,
+ emitter.on.args[findIndex(emitter.on.args, { 0: event })][1]
+ );
+}
+
+function setup(opts = {}) {
+ sinon.stub(cluster, 'fork', function () {
+ return new MockClusterFork();
+ });
+
+ const worker = new Worker(opts);
+ workersToShutdown.push(worker);
+ return worker;
+}
+
+describe('CLI cluster manager', function () {
+
+ afterEach(async function () {
+ for (const worker of workersToShutdown) {
+ if (worker.shutdown.restore) {
+ // if the shutdown method was stubbed, restore it first
+ worker.shutdown.restore();
+ }
+
+ await worker.shutdown();
+ }
+ });
+
+ describe('#onChange', function () {
+ context('opts.watch = true', function () {
+ it('restarts the fork', function () {
+ const worker = setup({ watch: true });
+ sinon.stub(worker, 'start');
+ worker.onChange('/some/path');
+ expect(worker.changes).to.eql(['/some/path']);
+ sinon.assert.calledOnce(worker.start);
+ });
+ });
+
+ context('opts.watch = false', function () {
+ it('does not restart the fork', function () {
+ const worker = setup({ watch: false });
+ sinon.stub(worker, 'start');
+ worker.onChange('/some/path');
+ expect(worker.changes).to.eql([]);
+ sinon.assert.notCalled(worker.start);
+ });
+ });
+ });
+
+ describe('#shutdown', function () {
+ context('after starting()', function () {
+ it('kills the worker and unbinds from message, online, and disconnect events', async function () {
+ const worker = setup();
+ await worker.start();
+ expect(worker).to.have.property('online', true);
+ const fork = worker.fork;
+ sinon.assert.notCalled(fork.process.kill);
+ assertListenerAdded(fork, 'message');
+ assertListenerAdded(fork, 'online');
+ assertListenerAdded(fork, 'disconnect');
+ worker.shutdown();
+ sinon.assert.calledOnce(fork.process.kill);
+ assertListenerRemoved(fork, 'message');
+ assertListenerRemoved(fork, 'online');
+ assertListenerRemoved(fork, 'disconnect');
+ });
+ });
+
+ context('before being started', function () {
+ it('does nothing', function () {
+ const worker = setup();
+ worker.shutdown();
+ });
+ });
+ });
+
+ describe('#parseIncomingMessage()', function () {
+ context('on a started worker', function () {
+ it(`is bound to fork's message event`, async function () {
+ const worker = setup();
+ await worker.start();
+ sinon.assert.calledWith(worker.fork.on, 'message');
+ });
+ });
+
+ it('ignores non-array messsages', function () {
+ const worker = setup();
+ worker.parseIncomingMessage('some string thing');
+ worker.parseIncomingMessage(0);
+ worker.parseIncomingMessage(null);
+ worker.parseIncomingMessage(undefined);
+ worker.parseIncomingMessage({ like: 'an object' });
+ worker.parseIncomingMessage(/weird/);
+ });
+
+ it('calls #onMessage with message parts', function () {
+ const worker = setup();
+ const stub = sinon.stub(worker, 'onMessage');
+ worker.parseIncomingMessage([10, 100, 1000, 10000]);
+ sinon.assert.calledWith(stub, 10, 100, 1000, 10000);
+ });
+ });
+
+ describe('#onMessage', function () {
+ context('when sent WORKER_BROADCAST message', function () {
+ it('emits the data to be broadcasted', function () {
+ const worker = setup();
+ const data = {};
+ const stub = sinon.stub(worker, 'emit');
+ worker.onMessage('WORKER_BROADCAST', data);
+ sinon.assert.calledWithExactly(stub, 'broadcast', data);
+ });
+ });
+
+ context('when sent WORKER_LISTENING message', function () {
+ it('sets the listening flag and emits the listening event', function () {
+ const worker = setup();
+ const data = {};
+ const stub = sinon.stub(worker, 'emit');
+ expect(worker).to.have.property('listening', false);
+ worker.onMessage('WORKER_LISTENING');
+ expect(worker).to.have.property('listening', true);
+ sinon.assert.calledWithExactly(stub, 'listening');
+ });
+ });
+
+ context('when passed an unkown message', function () {
+ it('does nothing', function () {
+ const worker = setup();
+ worker.onMessage('asdlfkajsdfahsdfiohuasdofihsdoif');
+ worker.onMessage({});
+ worker.onMessage(23049283094);
+ });
+ });
+ });
+
+ describe('#start', function () {
+ context('when not started', function () {
+ it('creates a fork and waits for it to come online', async function () {
+ const worker = setup();
+
+ sinon.spy(worker, 'on');
+
+ await worker.start();
+
+ sinon.assert.calledOnce(cluster.fork);
+ sinon.assert.calledWith(worker.on, 'fork:online');
+ });
+
+ it('listens for cluster and process "exit" events', async function () {
+ const worker = setup();
+
+ sinon.spy(process, 'on');
+ sinon.spy(cluster, 'on');
+
+ await worker.start();
+
+ sinon.assert.calledOnce(cluster.on);
+ sinon.assert.calledWith(cluster.on, 'exit');
+ sinon.assert.calledOnce(process.on);
+ sinon.assert.calledWith(process.on, 'exit');
+ });
+ });
+
+ context('when already started', function () {
+ it('calls shutdown and waits for the graceful shutdown to cause a restart', async function () {
+ const worker = setup();
+ await worker.start();
+ sinon.spy(worker, 'shutdown');
+ sinon.spy(worker, 'on');
+
+ worker.start();
+ sinon.assert.calledOnce(worker.shutdown);
+ sinon.assert.calledWith(worker.on, 'online');
+ });
+ });
+ });
+});
diff --git a/src/cli/cluster/base_path_proxy.js b/src/cli/cluster/base_path_proxy.js
new file mode 100644
index 0000000000000..ca0cde62bdbf1
--- /dev/null
+++ b/src/cli/cluster/base_path_proxy.js
@@ -0,0 +1,120 @@
+import { Server } from 'hapi';
+import { notFound } from 'boom';
+import { merge, sample } from 'lodash';
+import { format as formatUrl } from 'url';
+import { map, fromNode } from 'bluebird';
+import { Agent as HttpsAgent } from 'https';
+import { readFileSync } from 'fs';
+
+import Config from '../../server/config/config';
+import setupConnection from '../../server/http/setup_connection';
+import setupLogging from '../../server/logging';
+
+const alphabet = 'abcdefghijklmnopqrztuvwxyz'.split('');
+
+export default class BasePathProxy {
+ constructor(clusterManager, userSettings) {
+ this.clusterManager = clusterManager;
+ this.server = new Server();
+
+ const config = Config.withDefaultSchema(userSettings);
+
+ this.targetPort = config.get('dev.basePathProxyTarget');
+ this.basePath = config.get('server.basePath');
+
+ const { cert } = config.get('server.ssl');
+ if (cert) {
+ this.proxyAgent = new HttpsAgent({
+ ca: readFileSync(cert)
+ });
+ }
+
+ if (!this.basePath) {
+ this.basePath = `/${sample(alphabet, 3).join('')}`;
+ config.set('server.basePath', this.basePath);
+ }
+
+ setupLogging(null, this.server, config);
+ setupConnection(null, this.server, config);
+ this.setupRoutes();
+ }
+
+ setupRoutes() {
+ const { clusterManager, server, basePath, targetPort } = this;
+
+ server.route({
+ method: 'GET',
+ path: '/',
+ handler(req, reply) {
+ return reply.redirect(basePath);
+ }
+ });
+
+ server.route({
+ method: '*',
+ path: `${basePath}/{kbnPath*}`,
+ config: {
+ pre: [
+ (req, reply) => {
+ map(clusterManager.workers, worker => {
+ if (worker.type === 'server' && !worker.listening && !worker.crashed) {
+ return fromNode(cb => {
+ const done = () => {
+ worker.removeListener('listening', done);
+ worker.removeListener('crashed', done);
+ cb();
+ };
+
+ worker.on('listening', done);
+ worker.on('crashed', done);
+ });
+ }
+ })
+ .return(undefined)
+ .nodeify(reply);
+ }
+ ],
+ },
+ handler: {
+ proxy: {
+ passThrough: true,
+ xforward: true,
+ agent: this.proxyAgent,
+ mapUri(req, callback) {
+ callback(null, formatUrl({
+ protocol: server.info.protocol,
+ hostname: server.info.host,
+ port: targetPort,
+ pathname: req.params.kbnPath,
+ query: req.query,
+ }));
+ }
+ }
+ }
+ });
+
+ server.route({
+ method: '*',
+ path: `/{oldBasePath}/{kbnPath*}`,
+ handler(req, reply) {
+ const {oldBasePath, kbnPath = ''} = req.params;
+
+ const isGet = req.method === 'get';
+ const isBasePath = oldBasePath.length === 3;
+ const isApp = kbnPath.slice(0, 4) === 'app/';
+
+ if (isGet && isBasePath && isApp) {
+ return reply.redirect(`${basePath}/${kbnPath}`);
+ }
+
+ return reply(notFound());
+ }
+ });
+ }
+
+ async listen() {
+ await fromNode(cb => this.server.start(cb));
+ this.server.log(['listening', 'info'], `basePath Proxy running at ${this.server.info.uri}${this.basePath}`);
+ }
+
+}
diff --git a/src/cli/cluster/cluster_manager.js b/src/cli/cluster/cluster_manager.js
new file mode 100644
index 0000000000000..ab7243d9ff78c
--- /dev/null
+++ b/src/cli/cluster/cluster_manager.js
@@ -0,0 +1,161 @@
+import cluster from 'cluster';
+const { join, resolve } = require('path');
+const { format: formatUrl } = require('url');
+import Hapi from 'hapi';
+const { debounce, compact, get, invoke, bindAll, once, sample, uniq } = require('lodash');
+
+import Log from '../log';
+import Worker from './worker';
+import BasePathProxy from './base_path_proxy';
+
+process.env.kbnWorkerType = 'managr';
+
+module.exports = class ClusterManager {
+ constructor(opts = {}, settings = {}) {
+ this.log = new Log(opts.quiet, opts.silent);
+ this.addedCount = 0;
+
+ const serverArgv = [];
+ const optimizerArgv = [
+ '--plugins.initialize=false',
+ '--server.autoListen=false',
+ ];
+
+ if (opts.basePath) {
+ this.basePathProxy = new BasePathProxy(this, settings);
+
+ optimizerArgv.push(
+ `--server.basePath=${this.basePathProxy.basePath}`
+ );
+
+ serverArgv.push(
+ `--server.port=${this.basePathProxy.targetPort}`,
+ `--server.basePath=${this.basePathProxy.basePath}`
+ );
+ }
+
+ this.workers = [
+ this.optimizer = new Worker({
+ type: 'optmzr',
+ title: 'optimizer',
+ log: this.log,
+ argv: optimizerArgv,
+ watch: false
+ }),
+
+ this.server = new Worker({
+ type: 'server',
+ log: this.log,
+ argv: serverArgv
+ })
+ ];
+
+ // broker messages between workers
+ this.workers.forEach((worker) => {
+ worker.on('broadcast', (msg) => {
+ this.workers.forEach((to) => {
+ if (to !== worker && to.online) {
+ to.fork.send(msg);
+ }
+ });
+ });
+ });
+
+ bindAll(this, 'onWatcherAdd', 'onWatcherError', 'onWatcherChange');
+
+ if (opts.watch) {
+ this.setupWatching([
+ ...settings.plugins.paths,
+ ...settings.plugins.scanDirs
+ ]);
+ }
+
+ else this.startCluster();
+ }
+
+ startCluster() {
+ this.setupManualRestart();
+ invoke(this.workers, 'start');
+ if (this.basePathProxy) {
+ this.basePathProxy.listen();
+ }
+ }
+
+ setupWatching(extraPaths) {
+ const chokidar = require('chokidar');
+ const fromRoot = require('../../utils/from_root');
+
+ const watchPaths = uniq(
+ [
+ fromRoot('src/plugins'),
+ fromRoot('src/server'),
+ fromRoot('src/ui'),
+ fromRoot('src/utils'),
+ fromRoot('config'),
+ ...extraPaths
+ ]
+ .map(path => resolve(path))
+ );
+
+ this.watcher = chokidar.watch(watchPaths, {
+ cwd: fromRoot('.'),
+ ignored: /[\\\/](\..*|node_modules|bower_components|public|__tests__)[\\\/]/
+ });
+
+ this.watcher.on('add', this.onWatcherAdd);
+ this.watcher.on('error', this.onWatcherError);
+
+ this.watcher.on('ready', once(() => {
+ // start sending changes to workers
+ this.watcher.removeListener('add', this.onWatcherAdd);
+ this.watcher.on('all', this.onWatcherChange);
+
+ this.log.good('watching for changes', `(${this.addedCount} files)`);
+ this.startCluster();
+ }));
+ }
+
+ setupManualRestart() {
+ const readline = require('readline');
+ const rl = readline.createInterface(process.stdin, process.stdout);
+
+ let nls = 0;
+ const clear = () => nls = 0;
+ const clearSoon = debounce(clear, 2000);
+
+ rl.setPrompt('');
+ rl.prompt();
+
+ rl.on('line', line => {
+ nls = nls + 1;
+
+ if (nls >= 2) {
+ clearSoon.cancel();
+ clear();
+ this.server.start();
+ } else {
+ clearSoon();
+ }
+
+ rl.prompt();
+ });
+
+ rl.on('SIGINT', () => {
+ rl.pause();
+ process.kill(process.pid, 'SIGINT');
+ });
+ }
+
+ onWatcherAdd() {
+ this.addedCount += 1;
+ }
+
+ onWatcherChange(e, path) {
+ invoke(this.workers, 'onChange', path);
+ }
+
+ onWatcherError(err) {
+ this.log.bad('failed to watch files!\n', err.stack);
+ process.exit(1); // eslint-disable-line no-process-exit
+ }
+};
diff --git a/src/cli/cluster/worker.js b/src/cli/cluster/worker.js
new file mode 100644
index 0000000000000..4108627edea65
--- /dev/null
+++ b/src/cli/cluster/worker.js
@@ -0,0 +1,166 @@
+import _ from 'lodash';
+import cluster from 'cluster';
+import { resolve } from 'path';
+import { EventEmitter } from 'events';
+
+import { BinderFor, fromRoot } from '../../utils';
+
+let cliPath = fromRoot('src/cli');
+let baseArgs = _.difference(process.argv.slice(2), ['--no-watch']);
+let baseArgv = [process.execPath, cliPath].concat(baseArgs);
+
+cluster.setupMaster({
+ exec: cliPath,
+ silent: false
+});
+
+let dead = fork => {
+ return fork.isDead() || fork.killed;
+};
+
+module.exports = class Worker extends EventEmitter {
+ constructor(opts) {
+ opts = opts || {};
+ super();
+
+ this.log = opts.log;
+ this.type = opts.type;
+ this.title = opts.title || opts.type;
+ this.watch = (opts.watch !== false);
+ this.startCount = 0;
+
+ // status flags
+ this.online = false; // the fork can accept messages
+ this.listening = false; // the fork is listening for connections
+ this.crashed = false; // the fork crashed
+
+ this.changes = [];
+
+ this.forkBinder = null; // defined when the fork is
+ this.clusterBinder = new BinderFor(cluster);
+ this.processBinder = new BinderFor(process);
+
+ let argv = _.union(baseArgv, opts.argv || []);
+ this.env = {
+ kbnWorkerType: this.type,
+ kbnWorkerArgv: JSON.stringify(argv)
+ };
+ }
+
+ onExit(fork, code) {
+ if (this.fork !== fork) return;
+
+ // we have our fork's exit, so stop listening for others
+ this.clusterBinder.destroy();
+
+ // our fork is gone, clear our ref so we don't try to talk to it anymore
+ this.fork = null;
+ this.forkBinder = null;
+
+ this.online = false;
+ this.listening = false;
+ this.emit('fork:exit');
+ this.crashed = code > 0;
+
+ if (this.crashed) {
+ this.emit('crashed');
+ this.log.bad(`${this.title} crashed`, 'with status code', code);
+ if (!this.watch) process.exit(code);
+ } else {
+ // restart after graceful shutdowns
+ this.start();
+ }
+ }
+
+ onChange(path) {
+ if (!this.watch) return;
+ this.changes.push(path);
+ this.start();
+ }
+
+ async shutdown() {
+ if (this.fork && !dead(this.fork)) {
+ // kill the fork
+ this.fork.process.kill();
+ this.fork.killed = true;
+
+ // stop listening to the fork, it's just going to die
+ this.forkBinder.destroy();
+
+ // we don't need to react to process.exit anymore
+ this.processBinder.destroy();
+
+ // wait until the cluster reports this fork has exitted, then resolve
+ await new Promise(resolve => this.once('fork:exit', resolve));
+ }
+ }
+
+ parseIncomingMessage(msg) {
+ if (!_.isArray(msg)) return;
+ this.onMessage(...msg);
+ }
+
+ onMessage(type, data) {
+ switch (type) {
+ case 'WORKER_BROADCAST':
+ this.emit('broadcast', data);
+ break;
+ case 'WORKER_LISTENING':
+ this.listening = true;
+ this.emit('listening');
+ break;
+ }
+ }
+
+ onOnline() {
+ this.online = true;
+ this.emit('fork:online');
+ this.crashed = false;
+ }
+
+ onDisconnect() {
+ this.online = false;
+ this.listening = false;
+ }
+
+ flushChangeBuffer() {
+ let files = _.unique(this.changes.splice(0));
+ let prefix = files.length > 1 ? '\n - ' : '';
+ return files.reduce(function (list, file) {
+ return `${list || ''}${prefix}"${file}"`;
+ }, '');
+ }
+
+ async start() {
+ if (this.fork) {
+ // once "exit" event is received with 0 status, start() is called again
+ this.shutdown();
+ await new Promise(cb => this.once('online', cb));
+ return;
+ }
+
+ if (this.changes.length) {
+ this.log.warn(`restarting ${this.title}`, `due to changes in ${this.flushChangeBuffer()}`);
+ }
+ else if (this.startCount++) {
+ this.log.warn(`restarting ${this.title}...`);
+ }
+
+ this.fork = cluster.fork(this.env);
+ this.forkBinder = new BinderFor(this.fork);
+
+ // when the fork sends a message, comes online, or looses it's connection, then react
+ this.forkBinder.on('message', msg => this.parseIncomingMessage(msg));
+ this.forkBinder.on('online', () => this.onOnline());
+ this.forkBinder.on('disconnect', () => this.onDisconnect());
+
+ // when the cluster says a fork has exitted, check if it is ours
+ this.clusterBinder.on('exit', (fork, code) => this.onExit(fork, code));
+
+ // when the process exits, make sure we kill our workers
+ this.processBinder.on('exit', () => this.shutdown());
+
+ // wait for the fork to report it is online before resolving
+ await new Promise(cb => this.once('fork:online', cb));
+ }
+};
diff --git a/src/cli/color.js b/src/cli/color.js
index ae53be757154e..56188418c0140 100644
--- a/src/cli/color.js
+++ b/src/cli/color.js
@@ -1,6 +1,6 @@
-var _ = require('lodash');
-var ansicolors = require('ansicolors');
+import _ from 'lodash';
+import ansicolors from 'ansicolors';
exports.green = _.flow(ansicolors.black, ansicolors.bgGreen);
exports.red = _.flow(ansicolors.white, ansicolors.bgRed);
diff --git a/src/cli/command.js b/src/cli/command.js
new file mode 100644
index 0000000000000..135d03e32b6c1
--- /dev/null
+++ b/src/cli/command.js
@@ -0,0 +1,95 @@
+import _ from 'lodash';
+
+import help from './help';
+import { Command } from 'commander';
+import { red } from './color';
+import { yellow } from './color';
+
+Command.prototype.error = function (err) {
+ if (err && err.message) err = err.message;
+
+ console.log(
+`
+${red(' ERROR ')} ${err}
+
+${help(this, ' ')}
+`
+ );
+
+ process.exit(64); // eslint-disable-line no-process-exit
+};
+
+Command.prototype.defaultHelp = function () {
+ console.log(
+`
+${help(this, ' ')}
+
+`
+ );
+
+ process.exit(64); // eslint-disable-line no-process-exit
+};
+
+Command.prototype.unknownArgv = function (argv) {
+ if (argv) this.__unknownArgv = argv;
+ return this.__unknownArgv ? this.__unknownArgv.slice(0) : [];
+};
+
+/**
+ * setup the command to accept arbitrary configuration via the cli
+ * @return {[type]} [description]
+ */
+Command.prototype.collectUnknownOptions = function () {
+ let title = `Extra ${this._name} options`;
+
+ this.allowUnknownOption();
+ this.getUnknownOptions = function () {
+ let opts = {};
+ let unknowns = this.unknownArgv();
+
+ while (unknowns.length) {
+ let opt = unknowns.shift().split('=');
+ if (opt[0].slice(0, 2) !== '--') {
+ this.error(`${title} "${opt[0]}" must start with "--"`);
+ }
+
+ if (opt.length === 1) {
+ if (!unknowns.length || unknowns[0][0] === '-') {
+ this.error(`${title} "${opt[0]}" must have a value`);
+ }
+
+ opt.push(unknowns.shift());
+ }
+
+ let val = opt[1];
+ try { val = JSON.parse(opt[1]); }
+ catch (e) { val = opt[1]; }
+
+ _.set(opts, opt[0].slice(2), val);
+ }
+
+ return opts;
+ };
+
+ return this;
+};
+
+Command.prototype.parseOptions = _.wrap(Command.prototype.parseOptions, function (parse, argv) {
+ let opts = parse.call(this, argv);
+ this.unknownArgv(opts.unknown);
+ return opts;
+});
+
+Command.prototype.action = _.wrap(Command.prototype.action, function (action, fn) {
+ return action.call(this, function (...args) {
+ let ret = fn.apply(this, args);
+ if (ret && typeof ret.then === 'function') {
+ ret.then(null, function (e) {
+ console.log('FATAL CLI ERROR', e.stack);
+ process.exit(1);
+ });
+ }
+ });
+});
+
+module.exports = Command;
diff --git a/src/cli/help.js b/src/cli/help.js
index 59b9548eefccd..89bc90a24cbd6 100644
--- a/src/cli/help.js
+++ b/src/cli/help.js
@@ -1,4 +1,4 @@
-var _ = require('lodash');
+import _ from 'lodash';
module.exports = function (command, spaces) {
if (!_.size(command.commands)) {
@@ -69,6 +69,6 @@ ${indent(cmd.optionHelp(), 2)}
}
function humanReadableArgName(arg) {
- var nameOutput = arg.name + (arg.variadic === true ? '...' : '');
+ let nameOutput = arg.name + (arg.variadic === true ? '...' : '');
return arg.required ? '<' + nameOutput + '>' : '[' + nameOutput + ']';
}
diff --git a/src/cli/index.js b/src/cli/index.js
index e7f562b571154..a0cd17d62f151 100644
--- a/src/cli/index.js
+++ b/src/cli/index.js
@@ -1,5 +1,5 @@
// load the babel options seperately so that they can modify the process.env
// before calling babel/register
-const babelOptions = require('../optimize/babelOptions').node;
+const babelOptions = require('../optimize/babel_options').node;
require('babel/register')(babelOptions);
require('./cli');
diff --git a/src/cli/log.js b/src/cli/log.js
new file mode 100644
index 0000000000000..bbbe3f2f21e89
--- /dev/null
+++ b/src/cli/log.js
@@ -0,0 +1,16 @@
+import _ from 'lodash';
+import ansicolors from 'ansicolors';
+
+let log = _.restParam(function (color, label, rest1) {
+ console.log.apply(console, [color(` ${_.trim(label)} `)].concat(rest1));
+});
+
+import color from './color';
+
+module.exports = class Log {
+ constructor(quiet, silent) {
+ this.good = quiet || silent ? _.noop : _.partial(log, color.green);
+ this.warn = quiet || silent ? _.noop : _.partial(log, color.yellow);
+ this.bad = silent ? _.noop : _.partial(log, color.red);
+ }
+};
diff --git a/src/cli/plugin/__tests__/plugin.js b/src/cli/plugin/__tests__/plugin.js
deleted file mode 100644
index 1a53ce3b61158..0000000000000
--- a/src/cli/plugin/__tests__/plugin.js
+++ /dev/null
@@ -1,80 +0,0 @@
-var expect = require('expect.js');
-var sinon = require('sinon');
-
-var plugin = require('../plugin');
-var installer = require('../pluginInstaller');
-var remover = require('../pluginRemover');
-var settingParser = require('../settingParser');
-
-describe('kibana cli', function () {
-
- describe('plugin installer', function () {
-
- describe('commander options', function () {
-
- var program = {
- command: function () { return program; },
- description: function () { return program; },
- option: function () { return program; },
- action: function () { return program; }
- };
-
- it('should define the command', function () {
- sinon.spy(program, 'command');
-
- plugin(program);
- expect(program.command.calledWith('plugin')).to.be(true);
-
- program.command.restore();
- });
-
- it('should define the description', function () {
- sinon.spy(program, 'description');
-
- plugin(program);
- expect(program.description.calledWith('Maintain Plugins')).to.be(true);
-
- program.description.restore();
- });
-
- it('should define the command line options', function () {
- var spy = sinon.spy(program, 'option');
-
- var options = [
- /-i/,
- /-r/,
- /-s/,
- /-u/,
- /-t/
- ];
-
- plugin(program);
-
- for (var i = 0; i < spy.callCount; i++) {
- var call = spy.getCall(i);
- for (var o = 0; o < options.length; o++) {
- var option = options[o];
- if (call.args[0].match(option)) {
- options.splice(o, 1);
- break;
- }
- }
- }
-
- expect(options).to.have.length(0);
- });
-
- it('should call the action function', function () {
- sinon.spy(program, 'action');
-
- plugin(program);
- expect(program.action.calledOnce).to.be(true);
-
- program.action.restore();
- });
-
- });
-
- });
-
-});
diff --git a/src/cli/plugin/__tests__/pluginCleaner.js b/src/cli/plugin/__tests__/pluginCleaner.js
deleted file mode 100644
index cbd28aa322efd..0000000000000
--- a/src/cli/plugin/__tests__/pluginCleaner.js
+++ /dev/null
@@ -1,147 +0,0 @@
-var expect = require('expect.js');
-var sinon = require('sinon');
-var fs = require('fs');
-var rimraf = require('rimraf');
-
-var pluginCleaner = require('../pluginCleaner');
-var pluginLogger = require('../pluginLogger');
-
-describe('kibana cli', function () {
-
- describe('plugin installer', function () {
-
- describe('pluginCleaner', function () {
-
- var settings = {
- workingPath: 'dummy'
- };
-
- describe('cleanPrevious', function () {
-
- var cleaner;
- var errorStub;
- var logger;
- var progress;
- var request;
-
- beforeEach(function () {
- errorStub = sinon.stub();
- logger = pluginLogger(false);
- cleaner = pluginCleaner(settings, logger);
- sinon.stub(logger, 'log');
- sinon.stub(logger, 'error');
- request = {
- abort: sinon.stub(),
- emit: sinon.stub()
- };
- });
-
- afterEach(function () {
- logger.log.restore();
- logger.error.restore();
- fs.statSync.restore();
- rimraf.sync.restore();
- });
-
- it('should resolve if the working path does not exist', function () {
- sinon.stub(rimraf, 'sync');
- sinon.stub(fs, 'statSync', function () {
- var error = new Error('ENOENT');
- error.code = 'ENOENT';
- throw error;
- });
-
- return cleaner.cleanPrevious(logger)
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
- });
- });
-
- it('should rethrow any exception except ENOENT from fs.statSync', function () {
- sinon.stub(rimraf, 'sync');
- sinon.stub(fs, 'statSync', function () {
- var error = new Error('An Unhandled Error');
- throw error;
- });
-
- errorStub = sinon.stub();
- return cleaner.cleanPrevious(logger)
- .catch(errorStub)
- .then(function () {
- expect(errorStub.called).to.be(true);
- });
- });
-
- it('should log a message if there was a working directory', function () {
- sinon.stub(rimraf, 'sync');
- sinon.stub(fs, 'statSync');
-
- return cleaner.cleanPrevious(logger)
- .catch(errorStub)
- .then(function (data) {
- expect(logger.log.calledWith('Found previous install attempt. Deleting...')).to.be(true);
- });
- });
-
- it('should rethrow any exception from rimraf.sync', function () {
- sinon.stub(fs, 'statSync');
- sinon.stub(rimraf, 'sync', function () {
- throw new Error('I am an error thrown by rimraf');
- });
-
- errorStub = sinon.stub();
- return cleaner.cleanPrevious(logger)
- .catch(errorStub)
- .then(function () {
- expect(errorStub.called).to.be(true);
- });
- });
-
- it('should resolve if the working path is deleted', function () {
- sinon.stub(rimraf, 'sync');
- sinon.stub(fs, 'statSync');
-
- return cleaner.cleanPrevious(logger)
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
- });
- });
-
- });
-
- describe('cleanError', function () {
- var cleaner;
- var logger;
- beforeEach(function () {
- logger = pluginLogger(false);
- cleaner = pluginCleaner(settings, logger);
- });
-
- afterEach(function () {
- rimraf.sync.restore();
- });
-
- it('should attempt to delete the working directory', function () {
- sinon.stub(rimraf, 'sync');
-
- cleaner.cleanError();
- expect(rimraf.sync.calledWith(settings.workingPath)).to.be(true);
- });
-
- it('should swallow any errors thrown by rimraf.sync', function () {
- sinon.stub(rimraf, 'sync', function () {
- throw new Error('Something bad happened.');
- });
-
- expect(cleaner.cleanError).withArgs(settings).to.not.throwError();
- });
-
- });
-
- });
-
- });
-
-});
diff --git a/src/cli/plugin/__tests__/pluginDownloader.js b/src/cli/plugin/__tests__/pluginDownloader.js
deleted file mode 100644
index 93a6fdfcd4226..0000000000000
--- a/src/cli/plugin/__tests__/pluginDownloader.js
+++ /dev/null
@@ -1,249 +0,0 @@
-var expect = require('expect.js');
-var sinon = require('sinon');
-var nock = require('nock');
-var glob = require('glob');
-var rimraf = require('rimraf');
-var { join } = require('path');
-
-var pluginLogger = require('../pluginLogger');
-var pluginDownloader = require('../pluginDownloader');
-
-describe('kibana cli', function () {
-
- describe('plugin downloader', function () {
-
- var testWorkingPath = join(__dirname, '.test.data');
- var logger;
- var downloader;
-
- beforeEach(function () {
- logger = pluginLogger(false);
- sinon.stub(logger, 'log');
- sinon.stub(logger, 'error');
- rimraf.sync(testWorkingPath);
- });
-
- afterEach(function () {
- logger.log.restore();
- logger.error.restore();
- rimraf.sync(testWorkingPath);
- });
-
- describe('_downloadSingle', function () {
-
- beforeEach(function () {
- downloader = pluginDownloader({}, logger);
- });
-
- afterEach(function () {
- });
-
- it.skip('should throw an ENOTFOUND error for a 404 error', function () {
- var couchdb = nock('http://www.files.com')
- .get('/plugin.tar.gz')
- .reply(404);
-
- var source = 'http://www.files.com/plugin.tar.gz';
-
- var errorStub = sinon.stub();
- return downloader._downloadSingle(source, testWorkingPath, 0, logger)
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(true);
- expect(errorStub.lastCall.args[0].message).to.match(/ENOTFOUND/);
-
- var files = glob.sync('**/*', { cwd: testWorkingPath });
- expect(files).to.eql([]);
- });
- });
-
- it.skip('should download and extract a valid plugin', function () {
- var filename = join(__dirname, 'replies/test-plugin-master.tar.gz');
- var couchdb = nock('http://www.files.com')
- .defaultReplyHeaders({
- 'content-length': '10'
- })
- .get('/plugin.tar.gz')
- .replyWithFile(200, filename);
-
- var source = 'http://www.files.com/plugin.tar.gz';
-
- return downloader._downloadSingle(source, testWorkingPath, 0, logger)
- .then(function (data) {
- var files = glob.sync('**/*', { cwd: testWorkingPath });
- var expected = [
- 'README.md',
- 'index.js',
- 'package.json',
- 'public',
- 'public/app.js'
- ];
- expect(files.sort()).to.eql(expected.sort());
- });
- });
-
- it('should abort the download and extraction for a corrupt archive.', function () {
- var filename = join(__dirname, 'replies/corrupt.tar.gz');
- var couchdb = nock('http://www.files.com')
- .get('/plugin.tar.gz')
- .replyWithFile(200, filename);
-
- var source = 'http://www.files.com/plugin.tar.gz';
-
- var errorStub = sinon.stub();
- return downloader._downloadSingle(source, testWorkingPath, 0, logger)
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(true);
-
- var files = glob.sync('**/*', { cwd: testWorkingPath });
- expect(files).to.eql([]);
- });
- });
-
- });
-
- describe('download', function () {
-
- beforeEach(function () {});
-
- afterEach(function () {});
-
- it.skip('should loop through bad urls until it finds a good one.', function () {
- var filename = join(__dirname, 'replies/test-plugin-master.tar.gz');
- var settings = {
- urls: [
- 'http://www.files.com/badfile1.tar.gz',
- 'http://www.files.com/badfile2.tar.gz',
- 'I am a bad uri',
- 'http://www.files.com/goodfile.tar.gz'
- ],
- workingPath: testWorkingPath,
- timeout: 0
- };
- downloader = pluginDownloader(settings, logger);
-
- var couchdb = nock('http://www.files.com')
- .defaultReplyHeaders({
- 'content-length': '10'
- })
- .get('/badfile1.tar.gz')
- .reply(404)
- .get('/badfile2.tar.gz')
- .reply(404)
- .get('/goodfile.tar.gz')
- .replyWithFile(200, filename);
-
- var errorStub = sinon.stub();
- return downloader.download(settings, logger)
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
-
- expect(logger.log.getCall(0).args[0]).to.match(/badfile1.tar.gz/);
- expect(logger.log.getCall(1).args[0]).to.match(/badfile2.tar.gz/);
- expect(logger.log.getCall(2).args[0]).to.match(/I am a bad uri/);
- expect(logger.log.getCall(3).args[0]).to.match(/goodfile.tar.gz/);
- expect(logger.log.lastCall.args[0]).to.match(/complete/i);
-
- var files = glob.sync('**/*', { cwd: testWorkingPath });
- var expected = [
- 'README.md',
- 'index.js',
- 'package.json',
- 'public',
- 'public/app.js'
- ];
- expect(files.sort()).to.eql(expected.sort());
- });
- });
-
- it.skip('should stop looping through urls when it finds a good one.', function () {
- var filename = join(__dirname, 'replies/test-plugin-master.tar.gz');
- var settings = {
- urls: [
- 'http://www.files.com/badfile1.tar.gz',
- 'http://www.files.com/badfile2.tar.gz',
- 'http://www.files.com/goodfile.tar.gz',
- 'http://www.files.com/badfile3.tar.gz'
- ],
- workingPath: testWorkingPath,
- timeout: 0
- };
- downloader = pluginDownloader(settings, logger);
-
- var couchdb = nock('http://www.files.com')
- .defaultReplyHeaders({
- 'content-length': '10'
- })
- .get('/badfile1.tar.gz')
- .reply(404)
- .get('/badfile2.tar.gz')
- .reply(404)
- .get('/goodfile.tar.gz')
- .replyWithFile(200, filename)
- .get('/badfile3.tar.gz')
- .reply(404);
-
- var errorStub = sinon.stub();
- return downloader.download(settings, logger)
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
-
- for (var i = 0; i < logger.log.callCount; i++) {
- expect(logger.log.getCall(i).args[0]).to.not.match(/badfile3.tar.gz/);
- }
-
- var files = glob.sync('**/*', { cwd: testWorkingPath });
- var expected = [
- 'README.md',
- 'index.js',
- 'package.json',
- 'public',
- 'public/app.js'
- ];
- expect(files.sort()).to.eql(expected.sort());
- });
- });
-
- it.skip('should throw an error when it doesn\'t find a good url.', function () {
- var settings = {
- urls: [
- 'http://www.files.com/badfile1.tar.gz',
- 'http://www.files.com/badfile2.tar.gz',
- 'http://www.files.com/badfile3.tar.gz'
- ],
- workingPath: testWorkingPath,
- timeout: 0
- };
- downloader = pluginDownloader(settings, logger);
-
- var couchdb = nock('http://www.files.com')
- .defaultReplyHeaders({
- 'content-length': '10'
- })
- .get('/badfile1.tar.gz')
- .reply(404)
- .get('/badfile2.tar.gz')
- .reply(404)
- .get('/badfile3.tar.gz')
- .reply(404);
-
- var errorStub = sinon.stub();
- return downloader.download(settings, logger)
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(true);
- expect(errorStub.lastCall.args[0].message).to.match(/not a valid/i);
-
- var files = glob.sync('**/*', { cwd: testWorkingPath });
- expect(files).to.eql([]);
- });
- });
-
- });
-
- });
-
-});
diff --git a/src/cli/plugin/__tests__/pluginInstaller.js b/src/cli/plugin/__tests__/pluginInstaller.js
deleted file mode 100644
index c563b066d2e1a..0000000000000
--- a/src/cli/plugin/__tests__/pluginInstaller.js
+++ /dev/null
@@ -1,73 +0,0 @@
-var expect = require('expect.js');
-var sinon = require('sinon');
-var nock = require('nock');
-var rimraf = require('rimraf');
-var fs = require('fs');
-var { join } = require('path');
-var Promise = require('bluebird');
-
-var pluginLogger = require('../pluginLogger');
-var pluginInstaller = require('../pluginInstaller');
-
-describe('kibana cli', function () {
-
- describe('plugin installer', function () {
-
- describe('pluginInstaller', function () {
-
- var logger;
- var testWorkingPath;
- var processExitStub;
- var statSyncStub;
- beforeEach(function () {
- processExitStub = undefined;
- statSyncStub = undefined;
- logger = pluginLogger(false);
- testWorkingPath = join(__dirname, '.test.data');
- rimraf.sync(testWorkingPath);
- sinon.stub(logger, 'log');
- sinon.stub(logger, 'error');
- });
-
- afterEach(function () {
- if (processExitStub) processExitStub.restore();
- if (statSyncStub) statSyncStub.restore();
- logger.log.restore();
- logger.error.restore();
- rimraf.sync(testWorkingPath);
- });
-
- it('should throw an error if the workingPath already exists.', function () {
- processExitStub = sinon.stub(process, 'exit');
- fs.mkdirSync(testWorkingPath);
-
- var settings = {
- pluginPath: testWorkingPath
- };
-
- var errorStub = sinon.stub();
- return pluginInstaller.install(settings, logger)
- .catch(errorStub)
- .then(function (data) {
- expect(logger.error.firstCall.args[0]).to.match(/already exists/);
- expect(process.exit.called).to.be(true);
- });
- });
-
- it('should rethrow any non "ENOENT" error from fs.', function () {
- statSyncStub = sinon.stub(fs, 'statSync', function () {
- throw new Error('This is unexpected.');
- });
-
- var settings = {
- pluginPath: testWorkingPath
- };
-
- expect(pluginInstaller.install).withArgs(settings, logger).to.throwException(/this is unexpected/i);
- });
-
- });
-
- });
-
-});
diff --git a/src/cli/plugin/__tests__/pluginLogger.js b/src/cli/plugin/__tests__/pluginLogger.js
deleted file mode 100644
index 9390c46089c04..0000000000000
--- a/src/cli/plugin/__tests__/pluginLogger.js
+++ /dev/null
@@ -1,128 +0,0 @@
-var expect = require('expect.js');
-var sinon = require('sinon');
-
-var pluginLogger = require('../pluginLogger');
-
-describe('kibana cli', function () {
-
- describe('plugin installer', function () {
-
- describe('logger', function () {
-
- var logger;
-
- describe('logger.log', function () {
-
- beforeEach(function () {
- sinon.spy(process.stdout, 'write');
- });
-
- afterEach(function () {
- process.stdout.write.restore();
- });
-
- it('should log messages to the console and append a new line', function () {
- logger = pluginLogger({ silent: false, quiet: false });
- var message = 'this is my message';
-
- logger.log(message);
-
- var callCount = process.stdout.write.callCount;
- expect(process.stdout.write.getCall(callCount - 2).args[0]).to.be(message);
- expect(process.stdout.write.getCall(callCount - 1).args[0]).to.be('\n');
- });
-
- it('should log messages to the console and append not append a new line', function () {
- logger = pluginLogger({ silent: false, quiet: false });
- for (var i = 0; i < 10; i++) {
- logger.log('.', true);
- }
- logger.log('Done!');
-
- expect(process.stdout.write.callCount).to.be(13);
- expect(process.stdout.write.getCall(0).args[0]).to.be('.');
- expect(process.stdout.write.getCall(1).args[0]).to.be('.');
- expect(process.stdout.write.getCall(2).args[0]).to.be('.');
- expect(process.stdout.write.getCall(3).args[0]).to.be('.');
- expect(process.stdout.write.getCall(4).args[0]).to.be('.');
- expect(process.stdout.write.getCall(5).args[0]).to.be('.');
- expect(process.stdout.write.getCall(6).args[0]).to.be('.');
- expect(process.stdout.write.getCall(7).args[0]).to.be('.');
- expect(process.stdout.write.getCall(8).args[0]).to.be('.');
- expect(process.stdout.write.getCall(9).args[0]).to.be('.');
- expect(process.stdout.write.getCall(10).args[0]).to.be('\n');
- expect(process.stdout.write.getCall(11).args[0]).to.be('Done!');
- expect(process.stdout.write.getCall(12).args[0]).to.be('\n');
- });
-
- it('should not log any messages when quiet is set', function () {
- logger = pluginLogger({ silent: false, quiet: true });
-
- var message = 'this is my message';
- logger.log(message);
-
- for (var i = 0; i < 10; i++) {
- logger.log('.', true);
- }
- logger.log('Done!');
-
- expect(process.stdout.write.callCount).to.be(0);
- });
-
- it('should not log any messages when silent is set', function () {
- logger = pluginLogger({ silent: true, quiet: false });
-
- var message = 'this is my message';
- logger.log(message);
-
- for (var i = 0; i < 10; i++) {
- logger.log('.', true);
- }
- logger.log('Done!');
-
- expect(process.stdout.write.callCount).to.be(0);
- });
-
- });
-
- describe('logger.error', function () {
-
- beforeEach(function () {
- sinon.spy(process.stderr, 'write');
- });
-
- afterEach(function () {
- process.stderr.write.restore();
- });
-
- it('should log error messages to the console and append a new line', function () {
- logger = pluginLogger({ silent: false, quiet: false });
- var message = 'this is my error';
-
- logger.error(message);
- expect(process.stderr.write.calledWith(message + '\n')).to.be(true);
- });
-
- it('should log error messages to the console when quiet is set', function () {
- logger = pluginLogger({ silent: false, quiet: true });
- var message = 'this is my error';
-
- logger.error(message);
- expect(process.stderr.write.calledWith(message + '\n')).to.be(true);
- });
-
- it('should not log any error messages when silent is set', function () {
- logger = pluginLogger({ silent: true, quiet: false });
- var message = 'this is my error';
-
- logger.error(message);
- expect(process.stderr.write.callCount).to.be(0);
- });
-
- });
-
- });
-
- });
-
-});
diff --git a/src/cli/plugin/__tests__/progressReporter.js b/src/cli/plugin/__tests__/progressReporter.js
deleted file mode 100644
index d943a45ffc12c..0000000000000
--- a/src/cli/plugin/__tests__/progressReporter.js
+++ /dev/null
@@ -1,301 +0,0 @@
-var expect = require('expect.js');
-var sinon = require('sinon');
-var progressReporter = require('../progressReporter');
-var pluginLogger = require('../pluginLogger');
-
-describe('kibana cli', function () {
-
- describe('plugin installer', function () {
-
- describe('progressReporter', function () {
-
- var logger;
- var progress;
- var request;
- beforeEach(function () {
- logger = pluginLogger(false);
- sinon.stub(logger, 'log');
- sinon.stub(logger, 'error');
- request = {
- abort: sinon.stub(),
- emit: sinon.stub()
- };
- progress = progressReporter(logger, request);
- });
-
- afterEach(function () {
- logger.log.restore();
- logger.error.restore();
- });
-
- describe('handleResponse', function () {
-
- describe('bad response codes', function () {
-
- function testErrorResponse(element, index, array) {
- it('should set the state to error for response code = ' + element, function () {
- progress.handleResponse({ statusCode: element });
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(true);
- expect(errorStub.lastCall.args[0].message).to.match(/ENOTFOUND/);
- });
- });
- }
-
- var badCodes = [
- '400', '401', '402', '403', '404', '405', '406', '407', '408', '409', '410',
- '411', '412', '413', '414', '415', '416', '417', '500', '501', '502', '503',
- '504', '505'
- ];
-
- badCodes.forEach(testErrorResponse);
- });
-
- describe('good response codes', function () {
-
- function testSuccessResponse(statusCode, index, array) {
- it('should set the state to success for response code = ' + statusCode, function () {
- progress.handleResponse({ statusCode: statusCode, headers: { 'content-length': 1000 } });
- progress.handleEnd();
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
- expect(logger.log.getCall(logger.log.callCount - 2).args[0]).to.match(/1000/);
- });
- });
- }
-
-
- function testUnknownNumber(statusCode, index, array) {
- it('should log "unknown number of" for response code = ' + statusCode + ' without content-length header', function () {
- progress.handleResponse({ statusCode: statusCode, headers: {} });
- progress.handleEnd();
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
- expect(logger.log.getCall(logger.log.callCount - 2).args[0]).to.match(/unknown number/);
- });
- });
- }
-
- var goodCodes = [
- '200', '201', '202', '203', '204', '205', '206', '300', '301', '302', '303',
- '304', '305', '306', '307'
- ];
-
- goodCodes.forEach(testSuccessResponse);
- goodCodes.forEach(testUnknownNumber);
-
- });
-
- });
-
- describe('handleData', function () {
-
- it('should do nothing if the reporter is in an error state', function () {
- progress.handleResponse({ statusCode: 400 });
- progress.handleData({ length: 100 });
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(progress.hasError()).to.be(true);
- expect(request.abort.called).to.be(true);
- expect(logger.log.callCount).to.be(0);
- });
- });
-
- it('should do nothing if handleResponse hasn\'t successfully executed yet', function () {
- progress.handleData({ length: 100 });
- progress.handleEnd();
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(logger.log.callCount).to.be(1);
- expect(logger.log.lastCall.args[0]).to.match(/complete/i);
- });
- });
-
- it('should do nothing if handleResponse was called without a content-length header', function () {
- progress.handleResponse({ statusCode: 200, headers: {} });
- progress.handleData({ length: 100 });
- progress.handleEnd();
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(logger.log.callCount).to.be(2);
- expect(logger.log.getCall(0).args[0]).to.match(/downloading/i);
- expect(logger.log.getCall(1).args[0]).to.match(/complete/i);
- });
- });
-
- it('should show a max of 20 dots for full prgress', function () {
- progress.handleResponse({ statusCode: 200, headers: { 'content-length': 1000 } });
- progress.handleData({ length: 1000 });
- progress.handleEnd();
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(logger.log.callCount).to.be(22);
- expect(logger.log.getCall(0).args[0]).to.match(/downloading/i);
- expect(logger.log.getCall(1).args[0]).to.be('.');
- expect(logger.log.getCall(2).args[0]).to.be('.');
- expect(logger.log.getCall(3).args[0]).to.be('.');
- expect(logger.log.getCall(4).args[0]).to.be('.');
- expect(logger.log.getCall(5).args[0]).to.be('.');
- expect(logger.log.getCall(6).args[0]).to.be('.');
- expect(logger.log.getCall(7).args[0]).to.be('.');
- expect(logger.log.getCall(8).args[0]).to.be('.');
- expect(logger.log.getCall(9).args[0]).to.be('.');
- expect(logger.log.getCall(10).args[0]).to.be('.');
- expect(logger.log.getCall(11).args[0]).to.be('.');
- expect(logger.log.getCall(12).args[0]).to.be('.');
- expect(logger.log.getCall(13).args[0]).to.be('.');
- expect(logger.log.getCall(14).args[0]).to.be('.');
- expect(logger.log.getCall(15).args[0]).to.be('.');
- expect(logger.log.getCall(16).args[0]).to.be('.');
- expect(logger.log.getCall(17).args[0]).to.be('.');
- expect(logger.log.getCall(18).args[0]).to.be('.');
- expect(logger.log.getCall(19).args[0]).to.be('.');
- expect(logger.log.getCall(20).args[0]).to.be('.');
- expect(logger.log.getCall(21).args[0]).to.match(/complete/i);
- });
-
- });
-
- it('should show dot for each 5% of completion', function () {
- progress.handleResponse({ statusCode: 200, headers: { 'content-length': 1000 } });
- expect(logger.log.callCount).to.be(1);
-
- progress.handleData({ length: 50 }); //5%
- expect(logger.log.callCount).to.be(2);
-
- progress.handleData({ length: 100 }); //15%
- expect(logger.log.callCount).to.be(4);
-
- progress.handleData({ length: 200 }); //25%
- expect(logger.log.callCount).to.be(8);
-
- progress.handleData({ length: 590 }); //94%
- expect(logger.log.callCount).to.be(20);
-
- progress.handleData({ length: 60 }); //100%
- expect(logger.log.callCount).to.be(21);
-
- //Any progress over 100% should be ignored.
- progress.handleData({ length: 9999 });
- expect(logger.log.callCount).to.be(21);
-
- progress.handleEnd();
- expect(logger.log.callCount).to.be(22);
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
- expect(logger.log.getCall(0).args[0]).to.match(/downloading/i);
- expect(logger.log.getCall(21).args[0]).to.match(/complete/i);
- });
- });
-
- });
-
- describe('handleEnd', function () {
-
- it('should reject the deferred with a ENOTFOUND error if the reporter is in an error state', function () {
- progress.handleResponse({ statusCode: 400 });
-
- progress.handleEnd();
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.firstCall.args[0].message).to.match(/ENOTFOUND/);
- expect(errorStub.called).to.be(true);
- });
- });
-
- it('should resolve if the reporter is not in an error state', function () {
- progress.handleResponse({ statusCode: 307, headers: { 'content-length': 1000 } });
-
- progress.handleEnd();
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(false);
- expect(logger.log.lastCall.args[0]).to.match(/complete/i);
- });
- });
-
- });
-
- describe('handleError', function () {
-
- it('should log any errors', function () {
- progress.handleError('ERRORMESSAGE', new Error('oops!'));
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(true);
- expect(logger.error.callCount).to.be(1);
- expect(logger.error.lastCall.args[0]).to.match(/oops!/);
- });
- });
-
- it('should set the error state of the reporter', function () {
- progress.handleError('ERRORMESSAGE', new Error('oops!'));
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(progress.hasError()).to.be(true);
- });
- });
-
- it('should ignore all errors except the first.', function () {
- progress.handleError('ERRORMESSAGE', new Error('oops!'));
- progress.handleError('ERRORMESSAGE', new Error('second error!'));
- progress.handleError('ERRORMESSAGE', new Error('third error!'));
- progress.handleError('ERRORMESSAGE', new Error('fourth error!'));
-
- var errorStub = sinon.stub();
- return progress.promise
- .catch(errorStub)
- .then(function (data) {
- expect(errorStub.called).to.be(true);
- expect(logger.error.callCount).to.be(1);
- expect(logger.error.lastCall.args[0]).to.match(/oops!/);
- });
- });
-
- });
-
- });
-
- });
-
-});
diff --git a/src/cli/plugin/__tests__/replies/corrupt.tar.gz b/src/cli/plugin/__tests__/replies/corrupt.tar.gz
deleted file mode 100644
index d737f1ef2271b..0000000000000
Binary files a/src/cli/plugin/__tests__/replies/corrupt.tar.gz and /dev/null differ
diff --git a/src/cli/plugin/__tests__/replies/package.json b/src/cli/plugin/__tests__/replies/package.json
deleted file mode 100644
index 0ee5897991d7d..0000000000000
--- a/src/cli/plugin/__tests__/replies/package.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "name": "test-plugin",
- "version": "1.0.0",
- "description": "just a test plugin",
- "repository": {
- "type": "git",
- "url": "http://website.git"
- },
- "dependencies": {
- "bluebird": "2.9.30"
- },
- "license": "Apache-2.0"
-}
\ No newline at end of file
diff --git a/src/cli/plugin/__tests__/replies/plugin-no-package.gz b/src/cli/plugin/__tests__/replies/plugin-no-package.gz
deleted file mode 100644
index 8695565d86ff7..0000000000000
Binary files a/src/cli/plugin/__tests__/replies/plugin-no-package.gz and /dev/null differ
diff --git a/src/cli/plugin/__tests__/replies/test-plugin-master.tar.gz b/src/cli/plugin/__tests__/replies/test-plugin-master.tar.gz
deleted file mode 100644
index 0fecb5e016c3b..0000000000000
Binary files a/src/cli/plugin/__tests__/replies/test-plugin-master.tar.gz and /dev/null differ
diff --git a/src/cli/plugin/__tests__/settingParser.js b/src/cli/plugin/__tests__/settingParser.js
deleted file mode 100644
index afb499a794d59..0000000000000
--- a/src/cli/plugin/__tests__/settingParser.js
+++ /dev/null
@@ -1,325 +0,0 @@
-var path = require('path');
-var expect = require('expect.js');
-
-var utils = require('requirefrom')('src/utils');
-var fromRoot = utils('fromRoot');
-var settingParser = require('../settingParser');
-
-describe('kibana cli', function () {
-
- describe('plugin installer', function () {
-
- describe('command line option parsing', function () {
-
- describe('parseMilliseconds function', function () {
-
- var parser = settingParser();
-
- it('should return 0 for an empty string', function () {
- var value = '';
-
- var result = parser.parseMilliseconds(value);
-
- expect(result).to.be(0);
- });
-
- it('should return 0 for a number with an invalid unit of measure', function () {
- var result = parser.parseMilliseconds('1gigablasts');
- expect(result).to.be(0);
- });
-
- it('should assume a number with no unit of measure is specified as milliseconds', function () {
- var result = parser.parseMilliseconds(1);
- expect(result).to.be(1);
-
- result = parser.parseMilliseconds('1');
- expect(result).to.be(1);
- });
-
- it('should interpret a number with "s" as the unit of measure as seconds', function () {
- var result = parser.parseMilliseconds('5s');
- expect(result).to.be(5 * 1000);
- });
-
- it('should interpret a number with "second" as the unit of measure as seconds', function () {
- var result = parser.parseMilliseconds('5second');
- expect(result).to.be(5 * 1000);
- });
-
- it('should interpret a number with "seconds" as the unit of measure as seconds', function () {
- var result = parser.parseMilliseconds('5seconds');
- expect(result).to.be(5 * 1000);
- });
-
- it('should interpret a number with "m" as the unit of measure as minutes', function () {
- var result = parser.parseMilliseconds('9m');
- expect(result).to.be(9 * 1000 * 60);
- });
-
- it('should interpret a number with "minute" as the unit of measure as minutes', function () {
- var result = parser.parseMilliseconds('9minute');
- expect(result).to.be(9 * 1000 * 60);
- });
-
- it('should interpret a number with "minutes" as the unit of measure as minutes', function () {
- var result = parser.parseMilliseconds('9minutes');
- expect(result).to.be(9 * 1000 * 60);
- });
-
- });
-
- describe('parse function', function () {
-
- var options;
- var parser;
- beforeEach(function () {
- options = { install: 'dummy/dummy', pluginDir: fromRoot('installedPlugins') };
- });
-
- it('should require the user to specify either install and remove', function () {
- options.install = null;
- parser = settingParser(options);
-
- expect(parser.parse).withArgs().to.throwError(/Please specify either --install or --remove./);
- });
-
- it('should not allow the user to specify both install and remove', function () {
- options.remove = 'package';
- options.install = 'org/package/version';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs().to.throwError(/Please specify either --install or --remove./);
- });
-
- describe('quiet option', function () {
-
- it('should default to false', function () {
- parser = settingParser(options);
- var settings = parser.parse(options);
-
- expect(settings.quiet).to.be(false);
- });
-
- it('should set settings.quiet property to true', function () {
- options.parent = { quiet: true };
- parser = settingParser(options);
- var settings = parser.parse(options);
-
- expect(settings.quiet).to.be(true);
- });
-
- });
-
- describe('silent option', function () {
-
- it('should default to false', function () {
- parser = settingParser(options);
- var settings = parser.parse(options);
-
- expect(settings).to.have.property('silent', false);
- });
-
- it('should set settings.silent property to true', function () {
- options.silent = true;
- parser = settingParser(options);
- var settings = parser.parse(options);
-
- expect(settings).to.have.property('silent', true);
- });
-
- });
-
-
- describe('timeout option', function () {
-
- it('should default to 0 (milliseconds)', function () {
- parser = settingParser(options);
- var settings = parser.parse(options);
-
- expect(settings).to.have.property('timeout', 0);
- });
-
- it('should set settings.timeout property to specified value', function () {
- options.timeout = 1234;
- parser = settingParser(options);
- var settings = parser.parse(options);
-
- expect(settings).to.have.property('timeout', 1234);
- });
-
- });
-
- describe('install option', function () {
-
- it('should set settings.action property to "install"', function () {
- options.install = 'org/package/version';
- parser = settingParser(options);
- var settings = parser.parse(options);
-
- expect(settings).to.have.property('action', 'install');
- });
-
- it('should allow two parts to the install parameter', function () {
- options.install = 'kibana/test-plugin';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs().to.not.throwError();
-
- var settings = parser.parse(options);
-
- expect(settings).to.have.property('organization', 'kibana');
- expect(settings).to.have.property('package', 'test-plugin');
- expect(settings).to.have.property('version', undefined);
- });
-
- it('should allow three parts to the install parameter', function () {
- options.install = 'kibana/test-plugin/v1.0.1';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs().to.not.throwError();
-
- var settings = parser.parse(options);
-
- expect(settings).to.have.property('organization', 'kibana');
- expect(settings).to.have.property('package', 'test-plugin');
- expect(settings).to.have.property('version', 'v1.0.1');
- });
-
- it('should not allow one part to the install parameter', function () {
- options.install = 'test-plugin';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs().to.throwError(/Invalid install option. Please use the format \/\/./);
- });
-
- it('should not allow more than three parts to the install parameter', function () {
- options.install = 'kibana/test-plugin/v1.0.1/dummy';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs().to.throwError(/Invalid install option. Please use the format \/\/./);
- });
-
- it('should populate the urls collection properly when no version specified', function () {
- options.install = 'kibana/test-plugin';
- parser = settingParser(options);
-
- var settings = parser.parse();
-
- expect(settings.urls).to.have.property('length', 2);
- expect(settings.urls).to.contain('https://download.elastic.co/kibana/test-plugin/test-plugin-latest.tar.gz');
- expect(settings.urls).to.contain('https://github.com/kibana/test-plugin/archive/master.tar.gz');
- });
-
- it('should populate the urls collection properly version specified', function () {
- options.install = 'kibana/test-plugin/v1.1.1';
- parser = settingParser(options);
-
- var settings = parser.parse();
-
- expect(settings.urls).to.have.property('length', 2);
- expect(settings.urls).to.contain('https://download.elastic.co/kibana/test-plugin/test-plugin-v1.1.1.tar.gz');
- expect(settings.urls).to.contain('https://github.com/kibana/test-plugin/archive/v1.1.1.tar.gz');
- });
-
- it('should populate the pluginPath', function () {
- options.install = 'kibana/test-plugin';
- parser = settingParser(options);
-
- var settings = parser.parse();
- var expected = fromRoot('installedPlugins/test-plugin');
-
- expect(settings).to.have.property('pluginPath', expected);
- });
-
- describe('with url option', function () {
-
- it('should allow one part to the install parameter', function () {
- options.install = 'test-plugin';
- options.url = 'http://www.google.com/plugin.tar.gz';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs().to.not.throwError();
-
- var settings = parser.parse();
-
- expect(settings).to.have.property('package', 'test-plugin');
- });
-
- it('should not allow more than one part to the install parameter', function () {
- options.url = 'http://www.google.com/plugin.tar.gz';
- options.install = 'kibana/test-plugin';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs()
- .to.throwError(/Invalid install option. When providing a url, please use the format ./);
- });
-
- it('should result in only the specified url in urls collection', function () {
- var url = 'http://www.google.com/plugin.tar.gz';
- options.install = 'test-plugin';
- options.url = url;
- parser = settingParser(options);
-
- var settings = parser.parse();
-
- expect(settings).to.have.property('urls');
- expect(settings.urls).to.be.an('array');
- expect(settings.urls).to.have.property('length', 1);
- expect(settings.urls).to.contain(url);
- });
-
- });
-
- });
-
- describe('remove option', function () {
-
- it('should set settings.action property to "remove"', function () {
- options.install = null;
- options.remove = 'package';
- parser = settingParser(options);
-
- var settings = parser.parse();
-
- expect(settings).to.have.property('action', 'remove');
- });
-
- it('should allow one part to the remove parameter', function () {
- options.install = null;
- options.remove = 'test-plugin';
- parser = settingParser(options);
-
- var settings = parser.parse();
-
- expect(settings).to.have.property('package', 'test-plugin');
- });
-
- it('should not allow more than one part to the install parameter', function () {
- options.install = null;
- options.remove = 'kibana/test-plugin';
- parser = settingParser(options);
-
- expect(parser.parse).withArgs()
- .to.throwError(/Invalid remove option. Please use the format ./);
- });
-
- it('should populate the pluginPath', function () {
- options.install = null;
- options.remove = 'test-plugin';
- parser = settingParser(options);
-
- var settings = parser.parse();
- var expected = fromRoot('installedPlugins/test-plugin');
-
- expect(settings).to.have.property('pluginPath', expected);
- });
-
- });
-
- });
-
- });
-
- });
-
-});
diff --git a/src/cli/plugin/plugin.js b/src/cli/plugin/plugin.js
deleted file mode 100644
index ac5a7996784b8..0000000000000
--- a/src/cli/plugin/plugin.js
+++ /dev/null
@@ -1,72 +0,0 @@
-var utils = require('requirefrom')('src/utils');
-var fromRoot = utils('fromRoot');
-
-var settingParser = require('./settingParser');
-var installer = require('./pluginInstaller');
-var remover = require('./pluginRemover');
-var pluginLogger = require('./pluginLogger');
-
-module.exports = function (program) {
- function processCommand(command, options) {
- var settings;
- try {
- settings = settingParser(command).parse();
- } catch (ex) {
- //The logger has not yet been initialized.
- console.error(ex.message);
- process.exit(64); // eslint-disable-line no-process-exit
- }
-
- var logger = pluginLogger(settings);
-
- if (settings.action === 'install') {
- installer.install(settings, logger);
- }
- if (settings.action === 'remove') {
- remover.remove(settings, logger);
- }
- }
-
- program
- .command('plugin')
- .option('-i, --install //', 'The plugin to install')
- .option('-r, --remove ', 'The plugin to remove')
- .option('-q, --quiet', 'Disable all process messaging except errors')
- .option('-s, --silent', 'Disable all process messaging')
- .option('-u, --url ', 'Specify download url')
- .option(
- '-c, --config ',
- 'Path to the config file',
- fromRoot('config/kibana.yml')
- )
- .option(
- '-t, --timeout ',
- 'Length of time before failing; 0 for never fail',
- settingParser.parseMilliseconds
- )
- .option(
- '-d, --plugin-dir ',
- 'The path to the directory where plugins are stored',
- fromRoot('installedPlugins')
- )
- .description(
- 'Maintain Plugins',
-`
- Common examples:
- -i username/sample
- attempts to download the latest version from the following urls:
- https://download.elastic.co/username/sample/sample-latest.tar.gz
- https://github.com/username/sample/archive/master.tar.gz
-
- -i username/sample/v1.1.1
- attempts to download version v1.1.1 from the following urls:
- https://download.elastic.co/username/sample/sample-v1.1.1.tar.gz
- https://github.com/username/sample/archive/v1.1.1.tar.gz
-
- -i sample -u http://www.example.com/other_name.tar.gz
- attempts to download from the specified url,
- and installs the plugin found at that url as "sample"
-`
- )
- .action(processCommand);
-};
diff --git a/src/cli/plugin/pluginCleaner.js b/src/cli/plugin/pluginCleaner.js
deleted file mode 100644
index 7cbb91fd60878..0000000000000
--- a/src/cli/plugin/pluginCleaner.js
+++ /dev/null
@@ -1,39 +0,0 @@
-var rimraf = require('rimraf');
-var fs = require('fs');
-var Promise = require('bluebird');
-
-module.exports = function (settings, logger) {
-
- function cleanPrevious() {
- return new Promise(function (resolve, reject) {
- try {
- fs.statSync(settings.workingPath);
-
- logger.log('Found previous install attempt. Deleting...');
- try {
- rimraf.sync(settings.workingPath);
- } catch (e) {
- return reject(e);
- }
- return resolve();
- } catch (e) {
- if (e.code !== 'ENOENT') return reject(e);
-
- return resolve();
- }
- });
- }
-
- function cleanError() {
- // delete the working directory.
- // At this point we're bailing, so swallow any errors on delete.
-
- try { rimraf.sync(settings.workingPath); }
- catch (e) {} // eslint-disable-line no-empty
- }
-
- return {
- cleanPrevious: cleanPrevious,
- cleanError: cleanError
- };
-};
diff --git a/src/cli/plugin/pluginDownloader.js b/src/cli/plugin/pluginDownloader.js
deleted file mode 100644
index f9d4b4afc9bb2..0000000000000
--- a/src/cli/plugin/pluginDownloader.js
+++ /dev/null
@@ -1,98 +0,0 @@
-var _ = require('lodash');
-var zlib = require('zlib');
-var Promise = require('bluebird');
-var url = require('url');
-var fs = require('fs');
-var request = require('request');
-var tar = require('tar');
-var progressReporter = require('./progressReporter');
-
-module.exports = function (settings, logger) {
-
- //Attempts to download each url in turn until one is successful
- function download() {
- var urls = settings.urls;
-
- function tryNext() {
- var sourceUrl = urls.shift();
- if (!sourceUrl) {
- throw new Error('Not a valid url.');
- }
-
- logger.log('Attempting to extract from ' + sourceUrl);
-
- return Promise.try(function () {
- return downloadSingle(sourceUrl, settings.workingPath, settings.timeout, logger)
- .catch(function (err) {
- if (err.message === 'ENOTFOUND') {
- return tryNext();
- }
- if (err.message === 'EEXTRACT') {
- throw (new Error('Error extracting the plugin archive... is this a valid tar.gz file?'));
- }
- throw (err);
- });
- })
- .catch(function (err) {
- //Special case for when request.get throws an exception
- if (err.message.match(/invalid uri/i)) {
- return tryNext();
- }
- throw (err);
- });
- }
-
- return tryNext();
- }
-
- //Attempts to download a single url
- function downloadSingle(source, dest, timeout) {
- var gunzip = zlib.createGunzip();
- var tarExtract = new tar.Extract({ path: dest, strip: 1 });
-
- var requestOptions = { url: source };
- if (timeout !== 0) {
- requestOptions.timeout = timeout;
- }
-
- return wrappedRequest(requestOptions)
- .then(function (fileStream) {
- var reporter = progressReporter(logger, fileStream);
-
- fileStream
- .on('response', reporter.handleResponse)
- .on('data', reporter.handleData)
- .on('error', _.partial(reporter.handleError, 'ENOTFOUND'))
- .pipe(gunzip)
- .on('error', _.partial(reporter.handleError, 'EEXTRACT'))
- .pipe(tarExtract)
- .on('error', _.partial(reporter.handleError, 'EEXTRACT'))
- .on('end', reporter.handleEnd);
-
- return reporter.promise;
- });
- }
-
- function wrappedRequest(requestOptions) {
- return Promise.try(function () {
- let urlInfo = url.parse(requestOptions.url);
- if (/^file/.test(urlInfo.protocol)) {
- return fs.createReadStream(urlInfo.path);
- } else {
- return request.get(requestOptions);
- }
- })
- .catch(function (err) {
- if (err.message.match(/invalid uri/i)) {
- throw new Error('ENOTFOUND');
- }
- throw err;
- });
- }
-
-
- return {
- download: download,
- _downloadSingle: downloadSingle
- };
-};
diff --git a/src/cli/plugin/pluginInstaller.js b/src/cli/plugin/pluginInstaller.js
deleted file mode 100644
index f92ade8446a3f..0000000000000
--- a/src/cli/plugin/pluginInstaller.js
+++ /dev/null
@@ -1,71 +0,0 @@
-let _ = require('lodash');
-var utils = require('requirefrom')('src/utils');
-var fromRoot = utils('fromRoot');
-var pluginDownloader = require('./pluginDownloader');
-var pluginCleaner = require('./pluginCleaner');
-var KbnServer = require('../../server/KbnServer');
-var readYamlConfig = require('../serve/readYamlConfig');
-var fs = require('fs');
-
-module.exports = {
- install: install
-};
-
-function install(settings, logger) {
- logger.log(`Installing ${settings.package}`);
-
- try {
- fs.statSync(settings.pluginPath);
-
- logger.error(`Plugin ${settings.package} already exists, please remove before installing a new version`);
- process.exit(70); // eslint-disable-line no-process-exit
- } catch (e) {
- if (e.code !== 'ENOENT') throw e;
- }
-
- var cleaner = pluginCleaner(settings, logger);
- var downloader = pluginDownloader(settings, logger);
-
- return cleaner.cleanPrevious()
- .then(function () {
- return downloader.download();
- })
- .then(async function() {
- logger.log('Optimizing and caching browser bundles...');
- let serverConfig = _.merge(
- readYamlConfig(settings.config),
- {
- env: 'production',
- logging: {
- silent: settings.silent,
- quiet: !settings.silent,
- verbose: false
- },
- optimize: {
- useBundleCache: false
- },
- server: {
- autoListen: false
- },
- plugins: {
- initialize: false,
- scanDirs: [settings.pluginDir, fromRoot('src/plugins')],
- paths: [settings.workingPath]
- }
- }
- );
-
- let kbnServer = new KbnServer(serverConfig);
- await kbnServer.ready();
- await kbnServer.close();
- })
- .then(function () {
- fs.renameSync(settings.workingPath, settings.pluginPath);
- logger.log('Plugin installation complete');
- })
- .catch(function (e) {
- logger.error(`Plugin installation was unsuccessful due to error "${e.message}"`);
- cleaner.cleanError();
- process.exit(70); // eslint-disable-line no-process-exit
- });
-}
diff --git a/src/cli/plugin/pluginLogger.js b/src/cli/plugin/pluginLogger.js
deleted file mode 100644
index 99b5a33976851..0000000000000
--- a/src/cli/plugin/pluginLogger.js
+++ /dev/null
@@ -1,44 +0,0 @@
-module.exports = function (settings) {
- var previousLineEnded = true;
- var silent = !!settings.silent;
- var quiet = !!settings.quiet;
-
- function log(data, sameLine) {
- if (silent || quiet) return;
-
- if (!sameLine && !previousLineEnded) {
- process.stdout.write('\n');
- }
-
- //if data is a stream, pipe it.
- if (data.readable) {
- data.pipe(process.stdout);
- return;
- }
-
- process.stdout.write(data);
- if (!sameLine) process.stdout.write('\n');
- previousLineEnded = !sameLine;
- }
-
- function error(data) {
- if (silent) return;
-
- if (!previousLineEnded) {
- process.stderr.write('\n');
- }
-
- //if data is a stream, pipe it.
- if (data.readable) {
- data.pipe(process.stderr);
- return;
- }
- process.stderr.write(data + '\n');
- previousLineEnded = true;
- }
-
- return {
- log: log,
- error: error
- };
-};
diff --git a/src/cli/plugin/pluginRemover.js b/src/cli/plugin/pluginRemover.js
deleted file mode 100644
index 2beb128819f89..0000000000000
--- a/src/cli/plugin/pluginRemover.js
+++ /dev/null
@@ -1,23 +0,0 @@
-var fs = require('fs');
-var rimraf = require('rimraf');
-
-module.exports = {
- remove: remove
-};
-
-function remove(settings, logger) {
- try {
- try {
- fs.statSync(settings.pluginPath);
- } catch (e) {
- logger.log(`Plugin ${settings.package} does not exist`);
- return;
- }
-
- logger.log(`Removing ${settings.package}...`);
- rimraf.sync(settings.pluginPath);
- } catch (err) {
- logger.error(`Unable to remove plugin "${settings.package}" because of error: "${err.message}"`);
- process.exit(74); // eslint-disable-line no-process-exit
- }
-}
diff --git a/src/cli/plugin/progressReporter.js b/src/cli/plugin/progressReporter.js
deleted file mode 100644
index 84962d20c62a5..0000000000000
--- a/src/cli/plugin/progressReporter.js
+++ /dev/null
@@ -1,71 +0,0 @@
-var Promise = require('bluebird');
-
-/*
-Responsible for reporting the progress of the file stream
-*/
-module.exports = function (logger, stream) {
- var oldDotCount = 0;
- var runningTotal = 0;
- var totalSize = 0;
- var hasError = false;
- var _resolve;
- var _reject;
- var _resp;
-
- var promise = new Promise(function (resolve, reject) {
- _resolve = resolve;
- _reject = reject;
- });
-
- function handleError(errorMessage, err) {
- if (hasError) return;
-
- if (err) logger.error(err);
- hasError = true;
- if (stream.abort) stream.abort();
- _reject(new Error(errorMessage));
- }
-
- function handleResponse(resp) {
- _resp = resp;
- if (resp.statusCode >= 400) {
- handleError('ENOTFOUND', null);
- } else {
- totalSize = parseInt(resp.headers['content-length'], 10) || 0;
- var totalDesc = totalSize || 'unknown number of';
-
- logger.log('Downloading ' + totalDesc + ' bytes', true);
- }
- }
-
- //Should log a dot for every 5% of progress
- //Note: no progress is logged if the plugin is downloaded in a single packet
- function handleData(buffer) {
- if (hasError) return;
- if (!totalSize) return;
-
- runningTotal += buffer.length;
- var dotCount = Math.round(runningTotal / totalSize * 100 / 5);
- if (dotCount > 20) dotCount = 20;
- for (var i = 0; i < (dotCount - oldDotCount); i++) {
- logger.log('.', true);
- }
- oldDotCount = dotCount;
- }
-
- function handleEnd() {
- if (hasError) return;
-
- logger.log('Extraction complete');
- _resolve();
- }
-
- return {
- promise: promise,
- handleResponse: handleResponse,
- handleError: handleError,
- handleData: handleData,
- handleEnd: handleEnd,
- hasError: function () { return hasError; }
- };
-};
diff --git a/src/cli/plugin/settingParser.js b/src/cli/plugin/settingParser.js
deleted file mode 100644
index 8272b2ea126b6..0000000000000
--- a/src/cli/plugin/settingParser.js
+++ /dev/null
@@ -1,112 +0,0 @@
-var { resolve } = require('path');
-var expiry = require('expiry-js');
-
-module.exports = function (options) {
- function parseMilliseconds(val) {
- var result;
-
- try {
- var timeVal = expiry(val);
- result = timeVal.asMilliseconds();
- } catch (ex) {
- result = 0;
- }
-
- return result;
- }
-
- function generateDownloadUrl(settings) {
- var version = (settings.version) || 'latest';
- var filename = settings.package + '-' + version + '.tar.gz';
-
- return 'https://download.elastic.co/' + settings.organization + '/' + settings.package + '/' + filename;
- }
-
- function generateGithubUrl(settings) {
- var version = (settings.version) || 'master';
- var filename = version + '.tar.gz';
-
- return 'https://github.com/' + settings.organization + '/' + settings.package + '/archive/' + filename;
- }
-
- function parse() {
- var parts;
- var settings = {
- timeout: 0,
- silent: false,
- quiet: false,
- urls: []
- };
-
- if (options.timeout) {
- settings.timeout = options.timeout;
- }
-
- if (options.parent && options.parent.quiet) {
- settings.quiet = options.parent.quiet;
- }
-
- if (options.silent) {
- settings.silent = options.silent;
- }
-
- if (options.url) {
- settings.urls.push(options.url);
- }
-
- if (options.config) {
- settings.config = options.config;
- }
-
- if (options.install) {
- settings.action = 'install';
- parts = options.install.split('/');
-
- if (options.url) {
- if (parts.length !== 1) {
- throw new Error('Invalid install option. When providing a url, please use the format .');
- }
-
- settings.package = parts.shift();
- } else {
- if (parts.length < 2 || parts.length > 3) {
- throw new Error('Invalid install option. Please use the format //.');
- }
-
- settings.organization = parts.shift();
- settings.package = parts.shift();
- settings.version = parts.shift();
-
- settings.urls.push(generateDownloadUrl(settings));
- settings.urls.push(generateGithubUrl(settings));
- }
- }
-
- if (options.remove) {
- settings.action = 'remove';
- parts = options.remove.split('/');
-
- if (parts.length !== 1) {
- throw new Error('Invalid remove option. Please use the format .');
- }
- settings.package = parts.shift();
- }
-
- if (!settings.action || (options.install && options.remove)) {
- throw new Error('Please specify either --install or --remove.');
- }
-
- settings.pluginDir = options.pluginDir;
- if (settings.package) {
- settings.pluginPath = resolve(settings.pluginDir, settings.package);
- settings.workingPath = resolve(settings.pluginDir, '.plugin.installing');
- }
-
- return settings;
- }
-
- return {
- parse: parse,
- parseMilliseconds: parseMilliseconds
- };
-};
diff --git a/src/cli/serve/__tests__/deprecated_config.js b/src/cli/serve/__tests__/deprecated_config.js
new file mode 100644
index 0000000000000..c132e814dcd12
--- /dev/null
+++ b/src/cli/serve/__tests__/deprecated_config.js
@@ -0,0 +1,48 @@
+import expect from 'expect.js';
+import { set } from 'lodash';
+import { checkForDeprecatedConfig } from '../deprecated_config';
+import sinon from 'auto-release-sinon';
+
+describe('cli/serve/deprecated_config', function () {
+ it('passes original config through', function () {
+ const config = {};
+ set(config, 'server.xsrf.token', 'xxtokenxx');
+ const output = checkForDeprecatedConfig(config);
+ expect(output).to.be(config);
+ expect(output.server).to.be(config.server);
+ expect(output.server.xsrf).to.be(config.server.xsrf);
+ expect(output.server.xsrf.token).to.be(config.server.xsrf.token);
+ });
+
+ it('logs warnings about deprecated config values', function () {
+ const log = sinon.stub();
+ const config = {};
+ set(config, 'server.xsrf.token', 'xxtokenxx');
+ checkForDeprecatedConfig(config, log);
+ sinon.assert.calledOnce(log);
+ expect(log.firstCall.args[0]).to.match(/server\.xsrf\.token.+deprecated/);
+ });
+
+ describe('does not support compound.keys', function () {
+ it('ignores fully compound keys', function () {
+ const log = sinon.stub();
+ const config = { 'server.xsrf.token': 'xxtokenxx' };
+ checkForDeprecatedConfig(config, log);
+ sinon.assert.notCalled(log);
+ });
+
+ it('ignores partially compound keys', function () {
+ const log = sinon.stub();
+ const config = { server: { 'xsrf.token': 'xxtokenxx' } };
+ checkForDeprecatedConfig(config, log);
+ sinon.assert.notCalled(log);
+ });
+
+ it('ignores compound parent keys', function () {
+ const log = sinon.stub();
+ const config = { 'server.xsrf': { token: 'xxtokenxx' } };
+ checkForDeprecatedConfig(config, log);
+ sinon.assert.notCalled(log);
+ });
+ });
+});
diff --git a/src/cli/serve/__tests__/fixtures/deprecated.yml b/src/cli/serve/__tests__/fixtures/deprecated.yml
new file mode 100644
index 0000000000000..748197e8957f6
--- /dev/null
+++ b/src/cli/serve/__tests__/fixtures/deprecated.yml
@@ -0,0 +1 @@
+server.xsrf.token: token
diff --git a/src/cli/serve/__tests__/fixtures/legacy.yml b/src/cli/serve/__tests__/fixtures/legacy.yml
new file mode 100644
index 0000000000000..080a80941646c
--- /dev/null
+++ b/src/cli/serve/__tests__/fixtures/legacy.yml
@@ -0,0 +1 @@
+kibana_index: indexname
diff --git a/src/cli/serve/__tests__/fixtures/one.yml b/src/cli/serve/__tests__/fixtures/one.yml
new file mode 100644
index 0000000000000..e577d50638d5f
--- /dev/null
+++ b/src/cli/serve/__tests__/fixtures/one.yml
@@ -0,0 +1,2 @@
+foo: 1
+bar: true
diff --git a/src/cli/serve/__tests__/fixtures/reload_logging_config/kibana.test.yml b/src/cli/serve/__tests__/fixtures/reload_logging_config/kibana.test.yml
new file mode 100644
index 0000000000000..22c5e93375c5f
--- /dev/null
+++ b/src/cli/serve/__tests__/fixtures/reload_logging_config/kibana.test.yml
@@ -0,0 +1,6 @@
+server:
+ port: 8274
+logging:
+ json: true
+optimize:
+ enabled: false
diff --git a/src/cli/serve/__tests__/fixtures/two.yml b/src/cli/serve/__tests__/fixtures/two.yml
new file mode 100644
index 0000000000000..aef807fcaebe9
--- /dev/null
+++ b/src/cli/serve/__tests__/fixtures/two.yml
@@ -0,0 +1,2 @@
+foo: 2
+baz: bonkers
diff --git a/src/cli/serve/__tests__/legacy_config.js b/src/cli/serve/__tests__/legacy_config.js
new file mode 100644
index 0000000000000..a380ae9e485c9
--- /dev/null
+++ b/src/cli/serve/__tests__/legacy_config.js
@@ -0,0 +1,28 @@
+import expect from 'expect.js';
+import { rewriteLegacyConfig } from '../legacy_config';
+import sinon from 'auto-release-sinon';
+
+describe('cli/serve/legacy_config', function () {
+ it('returns a clone of the input', function () {
+ const file = {};
+ const output = rewriteLegacyConfig(file);
+ expect(output).to.not.be(file);
+ });
+
+ it('rewrites legacy config values with literal path replacement', function () {
+ const file = { port: 4000, host: 'kibana.com' };
+ const output = rewriteLegacyConfig(file);
+ expect(output).to.not.be(file);
+ expect(output).to.eql({
+ 'server.port': 4000,
+ 'server.host': 'kibana.com',
+ });
+ });
+
+ it('logs warnings when legacy config properties are encountered', function () {
+ const log = sinon.stub();
+ rewriteLegacyConfig({ port: 5555 }, log);
+ sinon.assert.calledOnce(log);
+ expect(log.firstCall.args[0]).to.match(/port.+deprecated.+server\.port/);
+ });
+});
diff --git a/src/cli/serve/__tests__/read_yaml_config.js b/src/cli/serve/__tests__/read_yaml_config.js
new file mode 100644
index 0000000000000..29b620b27dbb0
--- /dev/null
+++ b/src/cli/serve/__tests__/read_yaml_config.js
@@ -0,0 +1,102 @@
+import expect from 'expect.js';
+import { join, relative, resolve } from 'path';
+import readYamlConfig from '../read_yaml_config';
+import sinon from 'auto-release-sinon';
+
+function fixture(name) {
+ return resolve(__dirname, 'fixtures', name);
+}
+
+describe('cli/serve/read_yaml_config', function () {
+ it('reads a single config file', function () {
+ const config = readYamlConfig(fixture('one.yml'));
+
+ expect(readYamlConfig(fixture('one.yml'))).to.eql({
+ foo: 1,
+ bar: true,
+ });
+ });
+
+ it('reads and merges multiple config files', function () {
+ const config = readYamlConfig([
+ fixture('one.yml'),
+ fixture('two.yml')
+ ]);
+
+ expect(config).to.eql({
+ foo: 2,
+ bar: true,
+ baz: 'bonkers'
+ });
+ });
+
+ context('different cwd()', function () {
+ const oldCwd = process.cwd();
+ const newCwd = join(oldCwd, '..');
+
+ before(function () {
+ process.chdir(newCwd);
+ });
+
+ it('resolves relative files based on the cwd', function () {
+ const relativePath = relative(newCwd, fixture('one.yml'));
+ const config = readYamlConfig(relativePath);
+ expect(config).to.eql({
+ foo: 1,
+ bar: true,
+ });
+ });
+
+ it('fails to load relative paths, not found because of the cwd', function () {
+ expect(function () {
+ readYamlConfig(relative(oldCwd, fixture('one.yml')));
+ }).to.throwException(/ENOENT/);
+ });
+
+ after(function () {
+ process.chdir(oldCwd);
+ });
+ });
+
+ context('stubbed stdout', function () {
+ let stub;
+
+ beforeEach(function () {
+ stub = sinon.stub(process.stdout, 'write');
+ });
+
+ context('deprecated settings', function () {
+ it('warns about deprecated settings', function () {
+ readYamlConfig(fixture('deprecated.yml'));
+ sinon.assert.calledOnce(stub);
+ expect(stub.firstCall.args[0]).to.match(/deprecated/);
+ stub.restore();
+ });
+
+ it('only warns once about deprecated settings', function () {
+ readYamlConfig(fixture('deprecated.yml'));
+ readYamlConfig(fixture('deprecated.yml'));
+ readYamlConfig(fixture('deprecated.yml'));
+ sinon.assert.notCalled(stub); // already logged in previous test
+ stub.restore();
+ });
+ });
+
+ context('legacy settings', function () {
+ it('warns about legacy settings', function () {
+ readYamlConfig(fixture('legacy.yml'));
+ sinon.assert.calledOnce(stub);
+ expect(stub.firstCall.args[0]).to.match(/has been replaced/);
+ stub.restore();
+ });
+
+ it('only warns once about legacy settings', function () {
+ readYamlConfig(fixture('legacy.yml'));
+ readYamlConfig(fixture('legacy.yml'));
+ readYamlConfig(fixture('legacy.yml'));
+ sinon.assert.notCalled(stub); // already logged in previous test
+ stub.restore();
+ });
+ });
+ });
+});
diff --git a/src/cli/serve/__tests__/reload_logging_config.js b/src/cli/serve/__tests__/reload_logging_config.js
new file mode 100644
index 0000000000000..e53e646974f14
--- /dev/null
+++ b/src/cli/serve/__tests__/reload_logging_config.js
@@ -0,0 +1,88 @@
+import { spawn } from 'child_process';
+import { writeFileSync, readFile } from 'fs';
+import { relative, resolve } from 'path';
+import { safeDump } from 'js-yaml';
+import es from 'event-stream';
+import readYamlConfig from '../read_yaml_config';
+import expect from 'expect.js';
+const testConfigFile = follow(`fixtures/reload_logging_config/kibana.test.yml`);
+const cli = follow(`../../../../bin/kibana`);
+
+function follow(file) {
+ return relative(process.cwd(), resolve(__dirname, file));
+}
+
+function setLoggingJson(enabled) {
+ const conf = readYamlConfig(testConfigFile);
+ conf.logging = conf.logging || {};
+ conf.logging.json = enabled;
+ const yaml = safeDump(conf);
+ writeFileSync(testConfigFile, yaml);
+ return conf;
+}
+
+describe(`Server logging configuration`, function () {
+ it(`should be reloadable via SIGHUP process signaling`, function (done) {
+ let asserted = false;
+ let json = Infinity;
+ const conf = setLoggingJson(true);
+ const child = spawn(cli, [`--config`, testConfigFile]);
+
+ child.on('error', err => {
+ done(new Error(`error in child process while attempting to reload config.
+${err.stack || err.message || err}`));
+ });
+
+ child.on('exit', code => {
+ expect(asserted).to.eql(true);
+ expect(code === null || code === 0).to.eql(true);
+ done();
+ });
+
+ child.stdout
+ .pipe(es.split())
+ .pipe(es.mapSync(function (line) {
+ if (!line) {
+ return line; // ignore empty lines
+ }
+ if (json--) {
+ expect(parseJsonLogLine).withArgs(line).to.not.throwError();
+ } else {
+ expectPlainTextLogLine(line);
+ }
+ }));
+
+ function parseJsonLogLine(line) {
+ try {
+ const data = JSON.parse(line);
+ const listening = data.tags.indexOf(`listening`) !== -1;
+ if (listening) {
+ switchToPlainTextLog();
+ }
+ } catch (err) {
+ expect(`Error parsing log line as JSON\n
+${err.stack || err.message || err}`).to.eql(true);
+ }
+ }
+
+ function switchToPlainTextLog() {
+ json = 2; // ignore both "reloading" messages
+ setLoggingJson(false);
+ child.kill(`SIGHUP`); // reload logging config
+ }
+
+ function expectPlainTextLogLine(line) {
+ // assert
+ const tags = `[\u001b[32minfo\u001b[39m][\u001b[36mconfig\u001b[39m]`;
+ const status = `Reloaded logging configuration due to SIGHUP.`;
+ const expected = `${tags} ${status}`;
+ const actual = line.slice(-expected.length);
+ expect(actual).to.eql(expected);
+
+ // cleanup
+ asserted = true;
+ setLoggingJson(true);
+ child.kill();
+ }
+ });
+});
diff --git a/src/cli/serve/deprecated_config.js b/src/cli/serve/deprecated_config.js
new file mode 100644
index 0000000000000..d0ec271a8cee8
--- /dev/null
+++ b/src/cli/serve/deprecated_config.js
@@ -0,0 +1,16 @@
+import { forOwn, has, noop } from 'lodash';
+
+// deprecated settings are still allowed, but will be removed at a later time. They
+// are checked for after the config object is prepared and known, so legacySettings
+// will have already been transformed.
+export const deprecatedSettings = new Map([
+ [['server', 'xsrf', 'token'], 'server.xsrf.token is deprecated. It is no longer used when providing xsrf protection.']
+]);
+
+// check for and warn about deprecated settings
+export function checkForDeprecatedConfig(object, log = noop) {
+ for (const [key, msg] of deprecatedSettings.entries()) {
+ if (has(object, key)) log(msg);
+ }
+ return object;
+}
diff --git a/src/cli/serve/legacy_config.js b/src/cli/serve/legacy_config.js
new file mode 100644
index 0000000000000..75fbb4e407eac
--- /dev/null
+++ b/src/cli/serve/legacy_config.js
@@ -0,0 +1,47 @@
+import { noop, transform } from 'lodash';
+
+// legacySettings allow kibana 4.2+ to accept the same config file that people
+// used for kibana 4.0 and 4.1. These settings are transformed to their modern
+// equivalents at the very beginning of the process
+export const legacySettings = {
+ // server
+ port: 'server.port',
+ host: 'server.host',
+ pid_file: 'pid.file',
+ ssl_cert_file: 'server.ssl.cert',
+ ssl_key_file: 'server.ssl.key',
+
+ // logging
+ log_file: 'logging.dest',
+
+ // kibana
+ kibana_index: 'kibana.index',
+ default_app_id: 'kibana.defaultAppId',
+
+ // es
+ ca: 'elasticsearch.ssl.ca',
+ elasticsearch_preserve_host: 'elasticsearch.preserveHost',
+ elasticsearch_url: 'elasticsearch.url',
+ kibana_elasticsearch_client_crt: 'elasticsearch.ssl.cert',
+ kibana_elasticsearch_client_key: 'elasticsearch.ssl.key',
+ kibana_elasticsearch_password: 'elasticsearch.password',
+ kibana_elasticsearch_username: 'elasticsearch.username',
+ ping_timeout: 'elasticsearch.pingTimeout',
+ request_timeout: 'elasticsearch.requestTimeout',
+ shard_timeout: 'elasticsearch.shardTimeout',
+ startup_timeout: 'elasticsearch.startupTimeout',
+ verify_ssl: 'elasticsearch.ssl.verify',
+};
+
+// transform legacy options into new namespaced versions
+export function rewriteLegacyConfig(object, log = noop) {
+ return transform(object, (clone, val, key) => {
+ if (legacySettings.hasOwnProperty(key)) {
+ const replacement = legacySettings[key];
+ log(`Config key "${key}" is deprecated. It has been replaced with "${replacement}"`);
+ clone[replacement] = val;
+ } else {
+ clone[key] = val;
+ }
+ }, {});
+}
diff --git a/src/cli/serve/readYamlConfig.js b/src/cli/serve/readYamlConfig.js
deleted file mode 100644
index 2bad080edd59e..0000000000000
--- a/src/cli/serve/readYamlConfig.js
+++ /dev/null
@@ -1,62 +0,0 @@
-let _ = require('lodash');
-let fs = require('fs');
-let yaml = require('js-yaml');
-
-let utils = require('requirefrom')('src/utils');
-let fromRoot = utils('fromRoot');
-
-let legacySettingMap = {
- // server
- port: 'server.port',
- host: 'server.host',
- pid_file: 'pid.file',
- ssl_cert_file: 'server.ssl.cert',
- ssl_key_file: 'server.ssl.key',
-
- // logging
- log_file: 'logging.dest',
-
- // kibana
- kibana_index: 'kibana.index',
- default_app_id: 'kibana.defaultAppId',
-
- // es
- ca: 'elasticsearch.ssl.ca',
- elasticsearch_preserve_host: 'elasticsearch.preserveHost',
- elasticsearch_url: 'elasticsearch.url',
- kibana_elasticsearch_client_crt: 'elasticsearch.ssl.cert',
- kibana_elasticsearch_client_key: 'elasticsearch.ssl.key',
- kibana_elasticsearch_password: 'elasticsearch.password',
- kibana_elasticsearch_username: 'elasticsearch.username',
- ping_timeout: 'elasticsearch.pingTimeout',
- request_timeout: 'elasticsearch.requestTimeout',
- shard_timeout: 'elasticsearch.shardTimeout',
- startup_timeout: 'elasticsearch.startupTimeout',
- verify_ssl: 'elasticsearch.ssl.verify',
-};
-
-module.exports = function (path) {
- if (!path) return {};
-
- let file = yaml.safeLoad(fs.readFileSync(path, 'utf8'));
-
- function apply(config, val, key) {
- if (_.isPlainObject(val)) {
- _.forOwn(val, function (subVal, subKey) {
- apply(config, subVal, key + '.' + subKey);
- });
- } else {
- _.set(config, key, val);
- }
- }
-
- // transform legeacy options into new namespaced versions
- return _.transform(file, function (config, val, key) {
- if (legacySettingMap.hasOwnProperty(key)) {
- key = legacySettingMap[key];
- }
-
- apply(config, val, key);
- }, {});
-};
-
diff --git a/src/cli/serve/read_yaml_config.js b/src/cli/serve/read_yaml_config.js
new file mode 100644
index 0000000000000..18e3a4520e875
--- /dev/null
+++ b/src/cli/serve/read_yaml_config.js
@@ -0,0 +1,40 @@
+import { chain, isArray, isPlainObject, forOwn, memoize, set, transform } from 'lodash';
+import { readFileSync as read } from 'fs';
+import { safeLoad } from 'js-yaml';
+import { red } from 'ansicolors';
+
+import { fromRoot } from '../../utils';
+import { rewriteLegacyConfig } from './legacy_config';
+import { checkForDeprecatedConfig } from './deprecated_config';
+
+const log = memoize(function (message) {
+ console.log(red('WARNING:'), message);
+});
+
+export function merge(sources) {
+ return transform(sources, (merged, source) => {
+ forOwn(source, function apply(val, key) {
+ if (isPlainObject(val)) {
+ forOwn(val, function (subVal, subKey) {
+ apply(subVal, key + '.' + subKey);
+ });
+ return;
+ }
+
+ if (isArray(val)) {
+ set(merged, key, []);
+ val.forEach((subVal, i) => apply(subVal, key + '.' + i));
+ return;
+ }
+
+ set(merged, key, val);
+ });
+ }, {});
+}
+
+export default function (paths) {
+ const files = [].concat(paths || []);
+ const yamls = files.map(path => safeLoad(read(path, 'utf8')));
+ const config = merge(yamls.map(file => rewriteLegacyConfig(file, log)));
+ return checkForDeprecatedConfig(config, log);
+}
diff --git a/src/cli/serve/serve.js b/src/cli/serve/serve.js
index d6d3834bd4d0d..9b73b11720f28 100644
--- a/src/cli/serve/serve.js
+++ b/src/cli/serve/serve.js
@@ -1,32 +1,72 @@
-let _ = require('lodash');
-let { isWorker } = require('cluster');
-let { resolve } = require('path');
-
-let cwd = process.cwd();
-let src = require('requirefrom')('src');
-let fromRoot = src('utils/fromRoot');
+import _ from 'lodash';
+import { statSync } from 'fs';
+import { isWorker } from 'cluster';
+import { resolve } from 'path';
+import { fromRoot } from '../../utils';
+import readYamlConfig from './read_yaml_config';
let canCluster;
try {
- require.resolve('../cluster/ClusterManager');
+ require.resolve('../cluster/cluster_manager');
canCluster = true;
} catch (e) {
canCluster = false;
}
-let pathCollector = function () {
- let paths = [];
+const pathCollector = function () {
+ const paths = [];
return function (path) {
paths.push(resolve(process.cwd(), path));
return paths;
};
};
-let pluginDirCollector = pathCollector();
-let pluginPathCollector = pathCollector();
+const configPathCollector = pathCollector();
+const pluginDirCollector = pathCollector();
+const pluginPathCollector = pathCollector();
+
+function readServerSettings(opts, extraCliOptions) {
+ const settings = readYamlConfig(opts.config);
+ const set = _.partial(_.set, settings);
+ const get = _.partial(_.get, settings);
+ const has = _.partial(_.has, settings);
+ const merge = _.partial(_.merge, settings);
+
+ if (opts.dev) {
+ set('env', 'development');
+ set('optimize.lazy', true);
+ if (opts.ssl && !has('server.ssl.cert') && !has('server.ssl.key')) {
+ set('server.host', 'localhost');
+ set('server.ssl.cert', fromRoot('test/dev_certs/server.crt'));
+ set('server.ssl.key', fromRoot('test/dev_certs/server.key'));
+ }
+ }
+
+ if (opts.elasticsearch) set('elasticsearch.url', opts.elasticsearch);
+ if (opts.port) set('server.port', opts.port);
+ if (opts.host) set('server.host', opts.host);
+ if (opts.quiet) set('logging.quiet', true);
+ if (opts.silent) set('logging.silent', true);
+ if (opts.verbose) set('logging.verbose', true);
+ if (opts.logFile) set('logging.dest', opts.logFile);
+
+ set('plugins.scanDirs', _.compact([].concat(
+ get('plugins.scanDirs'),
+ opts.pluginDir
+ )));
+
+ set('plugins.paths', _.compact([].concat(
+ get('plugins.paths'),
+ opts.pluginPath
+ )));
+
+ merge(extraCliOptions);
+
+ return settings;
+}
module.exports = function (program) {
- let command = program.command('serve');
+ const command = program.command('serve');
command
.description('Run the kibana server')
@@ -34,8 +74,11 @@ module.exports = function (program) {
.option('-e, --elasticsearch ', 'Elasticsearch instance')
.option(
'-c, --config ',
- 'Path to the config file, can be changed with the CONFIG_PATH environment variable as well',
- process.env.CONFIG_PATH || fromRoot('config/kibana.yml'))
+ 'Path to the config file, can be changed with the CONFIG_PATH environment variable as well. ' +
+      'Use multiple --config args to include multiple config files.',
+ configPathCollector,
+ [ process.env.CONFIG_PATH || fromRoot('config/kibana.yml') ]
+ )
.option('-p, --port ', 'The port to bind to', parseInt)
.option('-q, --quiet', 'Prevent all logging except errors')
.option('-Q, --silent', 'Prevent all logging')
@@ -64,67 +107,67 @@ module.exports = function (program) {
if (canCluster) {
command
.option('--dev', 'Run the server with development mode defaults')
+ .option('--no-ssl', 'Don\'t run the dev server using HTTPS')
+ .option('--no-base-path', 'Don\'t put a proxy in front of the dev server, which adds a random basePath')
.option('--no-watch', 'Prevents automatic restarts of the server in --dev mode');
}
command
.action(async function (opts) {
- if (canCluster && opts.dev && !isWorker) {
- // stop processing the action and handoff to cluster manager
- let ClusterManager = require('../cluster/ClusterManager');
- new ClusterManager(opts);
- return;
- }
-
- let readYamlConfig = require('./readYamlConfig');
- let KbnServer = src('server/KbnServer');
-
- let settings = readYamlConfig(opts.config);
-
if (opts.dev) {
- try { _.merge(settings, readYamlConfig(fromRoot('config/kibana.dev.yml'))); }
- catch (e) { null; }
+ try {
+ const kbnDevConfig = fromRoot('config/kibana.dev.yml');
+ if (statSync(kbnDevConfig).isFile()) {
+ opts.config.push(kbnDevConfig);
+ }
+ } catch (err) {
+ // ignore, kibana.dev.yml does not exist
+ }
}
- let set = _.partial(_.set, settings);
- let get = _.partial(_.get, settings);
+ const getCurrentSettings = () => readServerSettings(opts, this.getUnknownOptions());
+ const settings = getCurrentSettings();
- if (opts.dev) {
- set('env', 'development');
- set('optimize.lazy', true);
+ if (canCluster && opts.dev && !isWorker) {
+ // stop processing the action and handoff to cluster manager
+ const ClusterManager = require('../cluster/cluster_manager');
+ new ClusterManager(opts, settings);
+ return;
}
- if (opts.elasticsearch) set('elasticsearch.url', opts.elasticsearch);
- if (opts.port) set('server.port', opts.port);
- if (opts.host) set('server.host', opts.host);
- if (opts.quiet) set('logging.quiet', true);
- if (opts.silent) set('logging.silent', true);
- if (opts.verbose) set('logging.verbose', true);
- if (opts.logFile) set('logging.dest', opts.logFile);
-
- set('plugins.scanDirs', _.compact([].concat(
- get('plugins.scanDirs'),
- opts.pluginDir
- )));
-
- set('plugins.paths', [].concat(opts.pluginPath || []));
-
let kbnServer = {};
-
+ const KbnServer = require('../../server/kbn_server');
try {
- kbnServer = new KbnServer(_.merge(settings, this.getUnknownOptions()));
+ kbnServer = new KbnServer(settings);
await kbnServer.ready();
}
catch (err) {
- let { server } = kbnServer;
+ const { server } = kbnServer;
- if (server) server.log(['fatal'], err);
- console.error('FATAL', err);
+ if (err.code === 'EADDRINUSE') {
+ logFatal(`Port ${err.port} is already in use. Another instance of Kibana may be running!`, server);
+ } else {
+ logFatal(err, server);
+ }
kbnServer.close();
process.exit(1); // eslint-disable-line no-process-exit
}
+ process.on('SIGHUP', function reloadConfig() {
+ const settings = getCurrentSettings();
+ kbnServer.server.log(['info', 'config'], 'Reloading logging configuration due to SIGHUP.');
+ kbnServer.applyLoggingConfiguration(settings);
+ kbnServer.server.log(['info', 'config'], 'Reloaded logging configuration due to SIGHUP.');
+ });
+
return kbnServer;
});
};
+
+function logFatal(message, server) {
+ if (server) {
+ server.log(['fatal'], message);
+ }
+ console.error('FATAL', message);
+}
diff --git a/src/cli_plugin/cli.js b/src/cli_plugin/cli.js
new file mode 100644
index 0000000000000..5a09416a9cb1a
--- /dev/null
+++ b/src/cli_plugin/cli.js
@@ -0,0 +1,43 @@
+import _ from 'lodash';
+import pkg from '../utils/package_json';
+import Command from '../cli/command';
+import listCommand from './list';
+import installCommand from './install';
+import removeCommand from './remove';
+
+let argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
+let program = new Command('bin/kibana-plugin');
+
+program
+.version(pkg.version)
+.description(
+ 'The Kibana plugin manager enables you to install and remove plugins that ' +
+ 'provide additional functionality to Kibana'
+);
+
+listCommand(program);
+installCommand(program);
+removeCommand(program);
+
+program
+.command('help ')
+.description('get the help for a specific command')
+.action(function (cmdName) {
+ let cmd = _.find(program.commands, { _name: cmdName });
+ if (!cmd) return program.error(`unknown command ${cmdName}`);
+ cmd.help();
+});
+
+program
+.command('*', null, { noHelp: true })
+.action(function (cmd, options) {
+ program.error(`unknown command ${cmd}`);
+});
+
+// check for no command name
+let subCommand = argv[2] && !String(argv[2][0]).match(/^-|^\.|\//);
+if (!subCommand) {
+ program.defaultHelp();
+}
+
+program.parse(argv);
diff --git a/src/cli_plugin/index.js b/src/cli_plugin/index.js
new file mode 100644
index 0000000000000..a0cd17d62f151
--- /dev/null
+++ b/src/cli_plugin/index.js
@@ -0,0 +1,5 @@
+// load the babel options separately so that they can modify the process.env
+// before calling babel/register
+const babelOptions = require('../optimize/babel_options').node;
+require('babel/register')(babelOptions);
+require('./cli');
diff --git a/src/cli_plugin/install/__tests__/cleanup.js b/src/cli_plugin/install/__tests__/cleanup.js
new file mode 100644
index 0000000000000..c8812c8dedf8f
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/cleanup.js
@@ -0,0 +1,143 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import fs from 'fs';
+import rimraf from 'rimraf';
+
+import { cleanPrevious, cleanArtifacts } from '../cleanup';
+import Logger from '../../lib/logger';
+
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ describe('pluginCleaner', function () {
+ const settings = {
+ workingPath: 'dummy'
+ };
+
+ describe('cleanPrevious', function () {
+ let cleaner;
+ let errorStub;
+ let logger;
+ let progress;
+ let request;
+
+ beforeEach(function () {
+ errorStub = sinon.stub();
+ logger = new Logger(settings);
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ request = {
+ abort: sinon.stub(),
+ emit: sinon.stub()
+ };
+ });
+
+ afterEach(function () {
+ logger.log.restore();
+ logger.error.restore();
+ fs.statSync.restore();
+ rimraf.sync.restore();
+ });
+
+ it('should resolve if the working path does not exist', function () {
+ sinon.stub(rimraf, 'sync');
+ sinon.stub(fs, 'statSync', function () {
+ const error = new Error('ENOENT');
+ error.code = 'ENOENT';
+ throw error;
+ });
+
+ return cleanPrevious(settings, logger)
+ .catch(errorStub)
+ .then(function (data) {
+ expect(errorStub.called).to.be(false);
+ });
+ });
+
+ it('should rethrow any exception except ENOENT from fs.statSync', function () {
+ sinon.stub(rimraf, 'sync');
+ sinon.stub(fs, 'statSync', function () {
+ const error = new Error('An Unhandled Error');
+ throw error;
+ });
+
+ errorStub = sinon.stub();
+ return cleanPrevious(settings, logger)
+ .catch(errorStub)
+ .then(function () {
+ expect(errorStub.called).to.be(true);
+ });
+ });
+
+ it('should log a message if there was a working directory', function () {
+ sinon.stub(rimraf, 'sync');
+ sinon.stub(fs, 'statSync');
+
+ return cleanPrevious(settings, logger)
+ .catch(errorStub)
+ .then(function (data) {
+ expect(logger.log.calledWith('Found previous install attempt. Deleting...')).to.be(true);
+ });
+ });
+
+ it('should rethrow any exception from rimraf.sync', function () {
+ sinon.stub(fs, 'statSync');
+ sinon.stub(rimraf, 'sync', function () {
+ throw new Error('I am an error thrown by rimraf');
+ });
+
+ errorStub = sinon.stub();
+ return cleanPrevious(settings, logger)
+ .catch(errorStub)
+ .then(function () {
+ expect(errorStub.called).to.be(true);
+ });
+ });
+
+ it('should resolve if the working path is deleted', function () {
+ sinon.stub(rimraf, 'sync');
+ sinon.stub(fs, 'statSync');
+
+ return cleanPrevious(settings, logger)
+ .catch(errorStub)
+ .then(function (data) {
+ expect(errorStub.called).to.be(false);
+ });
+ });
+
+ });
+
+ describe('cleanArtifacts', function () {
+ let logger;
+
+ beforeEach(function () {
+ logger = new Logger(settings);
+ });
+
+ afterEach(function () {
+ rimraf.sync.restore();
+ });
+
+ it('should attempt to delete the working directory', function () {
+ sinon.stub(rimraf, 'sync');
+
+ cleanArtifacts(settings);
+ expect(rimraf.sync.calledWith(settings.workingPath)).to.be(true);
+ });
+
+ it('should swallow any errors thrown by rimraf.sync', function () {
+ sinon.stub(rimraf, 'sync', function () {
+ throw new Error('Something bad happened.');
+ });
+
+ expect(cleanArtifacts).withArgs(settings).to.not.throwError();
+ });
+
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/__tests__/download.js b/src/cli_plugin/install/__tests__/download.js
new file mode 100644
index 0000000000000..d38728fe25578
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/download.js
@@ -0,0 +1,227 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import nock from 'nock';
+import glob from 'glob-all';
+import rimraf from 'rimraf';
+import mkdirp from 'mkdirp';
+import Logger from '../../lib/logger';
+import { UnsupportedProtocolError } from '../../lib/errors';
+import { download, _downloadSingle } from '../download';
+import { join } from 'path';
+
+describe('kibana cli', function () {
+
+ describe('plugin downloader', function () {
+ const testWorkingPath = join(__dirname, '.test.data');
+ const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
+
+ const settings = {
+ urls: [],
+ workingPath: testWorkingPath,
+ tempArchiveFile: tempArchiveFilePath,
+ timeout: 0
+ };
+ const logger = new Logger(settings);
+
+ function expectWorkingPathEmpty() {
+ const files = glob.sync('**/*', { cwd: testWorkingPath });
+ expect(files).to.eql([]);
+ }
+
+ function expectWorkingPathNotEmpty() {
+ const files = glob.sync('**/*', { cwd: testWorkingPath });
+ const expected = [
+ 'archive.part'
+ ];
+
+ expect(files.sort()).to.eql(expected.sort());
+ }
+
+ function shouldReject() {
+ throw new Error('expected the promise to reject');
+ }
+
+ beforeEach(function () {
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ rimraf.sync(testWorkingPath);
+ mkdirp.sync(testWorkingPath);
+ });
+
+ afterEach(function () {
+ logger.log.restore();
+ logger.error.restore();
+ rimraf.sync(testWorkingPath);
+ });
+
+ describe('_downloadSingle', function () {
+
+ beforeEach(function () {
+ });
+
+ describe('http downloader', function () {
+
+      it('should throw an ENOTFOUND error for a http url that returns 404', function () {
+ const couchdb = nock('http://example.com')
+ .get('/plugin.tar.gz')
+ .reply(404);
+
+ const sourceUrl = 'http://example.com/plugin.tar.gz';
+
+ return _downloadSingle(settings, logger, sourceUrl)
+ .then(shouldReject, function (err) {
+ expect(err.message).to.match(/ENOTFOUND/);
+ expectWorkingPathEmpty();
+ });
+ });
+
+ it('should throw an UnsupportedProtocolError for an invalid url', function () {
+ const sourceUrl = 'i am an invalid url';
+
+ return _downloadSingle(settings, logger, sourceUrl)
+ .then(shouldReject, function (err) {
+ expect(err).to.be.an(UnsupportedProtocolError);
+ expectWorkingPathEmpty();
+ });
+ });
+
+ it('should download a file from a valid http url', function () {
+ const filePath = join(__dirname, 'replies/banana.jpg');
+
+ const couchdb = nock('http://example.com')
+ .defaultReplyHeaders({
+ 'content-length': '341965',
+ 'content-type': 'application/zip'
+ })
+ .get('/plugin.zip')
+ .replyWithFile(200, filePath);
+
+ const sourceUrl = 'http://example.com/plugin.zip';
+
+ return _downloadSingle(settings, logger, sourceUrl)
+ .then(function () {
+ expectWorkingPathNotEmpty();
+ });
+ });
+
+ });
+
+ describe('local file downloader', function () {
+
+ it('should throw an ENOTFOUND error for an invalid local file', function () {
+ const filePath = join(__dirname, 'replies/i-am-not-there.zip');
+ const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');
+
+ return _downloadSingle(settings, logger, sourceUrl)
+ .then(shouldReject, function (err) {
+ expect(err.message).to.match(/ENOTFOUND/);
+ expectWorkingPathEmpty();
+ });
+ });
+
+ it('should copy a valid local file', function () {
+ const filePath = join(__dirname, 'replies/banana.jpg');
+ const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');
+
+ return _downloadSingle(settings, logger, sourceUrl)
+ .then(function () {
+ expectWorkingPathNotEmpty();
+ });
+ });
+
+ });
+
+ });
+
+ describe('download', function () {
+ it('should loop through bad urls until it finds a good one.', function () {
+ const filePath = join(__dirname, 'replies/test_plugin.zip');
+ settings.urls = [
+ 'http://example.com/badfile1.tar.gz',
+ 'http://example.com/badfile2.tar.gz',
+ 'I am a bad uri',
+ 'http://example.com/goodfile.tar.gz'
+ ];
+
+ const couchdb = nock('http://example.com')
+ .defaultReplyHeaders({
+ 'content-length': '10'
+ })
+ .get('/badfile1.tar.gz')
+ .reply(404)
+ .get('/badfile2.tar.gz')
+ .reply(404)
+ .get('/goodfile.tar.gz')
+ .replyWithFile(200, filePath);
+
+ return download(settings, logger)
+ .then(function () {
+ expect(logger.log.getCall(0).args[0]).to.match(/badfile1.tar.gz/);
+ expect(logger.log.getCall(1).args[0]).to.match(/badfile2.tar.gz/);
+ expect(logger.log.getCall(2).args[0]).to.match(/I am a bad uri/);
+ expect(logger.log.getCall(3).args[0]).to.match(/goodfile.tar.gz/);
+ expectWorkingPathNotEmpty();
+ });
+ });
+
+ it('should stop looping through urls when it finds a good one.', function () {
+ const filePath = join(__dirname, 'replies/test_plugin.zip');
+ settings.urls = [
+ 'http://example.com/badfile1.tar.gz',
+ 'http://example.com/badfile2.tar.gz',
+ 'http://example.com/goodfile.tar.gz',
+ 'http://example.com/badfile3.tar.gz'
+ ];
+
+ const couchdb = nock('http://example.com')
+ .defaultReplyHeaders({
+ 'content-length': '10'
+ })
+ .get('/badfile1.tar.gz')
+ .reply(404)
+ .get('/badfile2.tar.gz')
+ .reply(404)
+ .get('/goodfile.tar.gz')
+ .replyWithFile(200, filePath)
+ .get('/badfile3.tar.gz')
+ .reply(404);
+
+ return download(settings, logger)
+ .then(function () {
+ for (let i = 0; i < logger.log.callCount; i++) {
+ expect(logger.log.getCall(i).args[0]).to.not.match(/badfile3.tar.gz/);
+ }
+ expectWorkingPathNotEmpty();
+ });
+ });
+
+ it('should throw an error when it doesn\'t find a good url.', function () {
+ settings.urls = [
+ 'http://example.com/badfile1.tar.gz',
+ 'http://example.com/badfile2.tar.gz',
+ 'http://example.com/badfile3.tar.gz'
+ ];
+
+ const couchdb = nock('http://example.com')
+ .defaultReplyHeaders({
+ 'content-length': '10'
+ })
+ .get('/badfile1.tar.gz')
+ .reply(404)
+ .get('/badfile2.tar.gz')
+ .reply(404)
+ .get('/badfile3.tar.gz')
+ .reply(404);
+
+ return download(settings, logger)
+ .then(shouldReject, function (err) {
+ expect(err.message).to.match(/no valid url specified/i);
+ expectWorkingPathEmpty();
+ });
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/__tests__/index.js b/src/cli_plugin/install/__tests__/index.js
new file mode 100644
index 0000000000000..470d9452a1c12
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/index.js
@@ -0,0 +1,76 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import index from '../index';
+
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ describe('commander options', function () {
+
+ let program = {
+ command: function () { return program; },
+ description: function () { return program; },
+ option: function () { return program; },
+ action: function () { return program; }
+ };
+
+ it('should define the command', function () {
+ sinon.spy(program, 'command');
+
+ index(program);
+ expect(program.command.calledWith('install ')).to.be(true);
+
+ program.command.restore();
+ });
+
+ it('should define the description', function () {
+ sinon.spy(program, 'description');
+
+ index(program);
+ expect(program.description.calledWith('install a plugin')).to.be(true);
+
+ program.description.restore();
+ });
+
+ it('should define the command line options', function () {
+ const spy = sinon.spy(program, 'option');
+
+ const options = [
+ /-q/,
+ /-s/,
+ /-c/,
+ /-t/,
+ /-d/
+ ];
+
+ index(program);
+
+ for (let i = 0; i < spy.callCount; i++) {
+ const call = spy.getCall(i);
+ for (let o = 0; o < options.length; o++) {
+ const option = options[o];
+ if (call.args[0].match(option)) {
+ options.splice(o, 1);
+ break;
+ }
+ }
+ }
+
+ expect(options).to.have.length(0);
+ });
+
+ it('should call the action function', function () {
+ sinon.spy(program, 'action');
+
+ index(program);
+ expect(program.action.calledOnce).to.be(true);
+
+ program.action.restore();
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/__tests__/kibana.js b/src/cli_plugin/install/__tests__/kibana.js
new file mode 100644
index 0000000000000..2ae0a1e2589ab
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/kibana.js
@@ -0,0 +1,170 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import Logger from '../../lib/logger';
+import { join } from 'path';
+import rimraf from 'rimraf';
+import mkdirp from 'mkdirp';
+import { existingInstall, assertVersion } from '../kibana';
+
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ describe('kibana', function () {
+ const testWorkingPath = join(__dirname, '.test.data');
+ const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
+
+ const settings = {
+ workingPath: testWorkingPath,
+ tempArchiveFile: tempArchiveFilePath,
+ plugin: 'test-plugin',
+ version: '1.0.0',
+ plugins: [ { name: 'foo', path: join(testWorkingPath, 'foo') } ]
+ };
+
+ const logger = new Logger(settings);
+
+ describe('assertVersion', function () {
+
+ beforeEach(function () {
+ rimraf.sync(testWorkingPath);
+ mkdirp.sync(testWorkingPath);
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ });
+
+ afterEach(function () {
+ logger.log.restore();
+ logger.error.restore();
+ rimraf.sync(testWorkingPath);
+ });
+
+ it('should succeed with exact match', function () {
+ const settings = {
+ workingPath: testWorkingPath,
+ tempArchiveFile: tempArchiveFilePath,
+ plugin: 'test-plugin',
+ version: '5.0.0-snapshot',
+ plugins: [ { name: 'foo', path: join(testWorkingPath, 'foo'), version: '5.0.0-snapshot' } ]
+ };
+ const errorStub = sinon.stub();
+
+ try {
+ assertVersion(settings);
+ }
+ catch (err) {
+ errorStub(err);
+ }
+
+ expect(errorStub.called).to.be(false);
+ });
+
+      it('should throw an error if plugin does not contain a version.', function () {
+ const errorStub = sinon.stub();
+
+ try {
+ assertVersion(settings);
+ }
+ catch (err) {
+ errorStub(err);
+ }
+
+ expect(errorStub.firstCall.args[0]).to.match(/plugin version not found/i);
+ });
+
+      it('should throw an error if plugin version does not match kibana version', function () {
+ const errorStub = sinon.stub();
+ settings.plugins[0].version = '1.2.3.4';
+
+ try {
+ assertVersion(settings);
+ }
+ catch (err) {
+ errorStub(err);
+ }
+
+ expect(errorStub.firstCall.args[0]).to.match(/incorrect version/i);
+ });
+
+ it('should not throw an error if plugin version matches kibana version', function () {
+ const errorStub = sinon.stub();
+ settings.plugins[0].version = '1.0.0';
+
+ try {
+ assertVersion(settings);
+ }
+ catch (err) {
+ errorStub(err);
+ }
+
+ expect(errorStub.called).to.be(false);
+ });
+
+ it('should ignore version info after the dash in checks on valid version', function () {
+ const errorStub = sinon.stub();
+ settings.plugins[0].version = '1.0.0-foo-bar-version-1.2.3';
+
+ try {
+ assertVersion(settings);
+ }
+ catch (err) {
+ errorStub(err);
+ }
+
+ expect(errorStub.called).to.be(false);
+ });
+
+ it('should ignore version info after the dash in checks on invalid version', function () {
+ const errorStub = sinon.stub();
+ settings.plugins[0].version = '2.0.0-foo-bar-version-1.2.3';
+
+ try {
+ assertVersion(settings);
+ }
+ catch (err) {
+ errorStub(err);
+ }
+
+ expect(errorStub.firstCall.args[0]).to.match(/incorrect version/i);
+ });
+ });
+
+ describe('existingInstall', function () {
+ let testWorkingPath;
+ let processExitStub;
+
+ beforeEach(function () {
+ processExitStub = sinon.stub(process, 'exit');
+ testWorkingPath = join(__dirname, '.test.data');
+ rimraf.sync(testWorkingPath);
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ });
+
+ afterEach(function () {
+ processExitStub.restore();
+ logger.log.restore();
+ logger.error.restore();
+ rimraf.sync(testWorkingPath);
+ });
+
+ it('should throw an error if the workingPath already exists.', function () {
+ mkdirp.sync(settings.plugins[0].path);
+ existingInstall(settings, logger);
+
+ expect(logger.error.firstCall.args[0]).to.match(/already exists/);
+ expect(process.exit.called).to.be(true);
+ });
+
+ it('should not throw an error if the workingPath does not exist.', function () {
+ existingInstall(settings, logger);
+ expect(logger.error.called).to.be(false);
+ });
+
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/__tests__/pack.js b/src/cli_plugin/install/__tests__/pack.js
new file mode 100644
index 0000000000000..433df5bc0a0db
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/pack.js
@@ -0,0 +1,174 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import glob from 'glob-all';
+import rimraf from 'rimraf';
+import mkdirp from 'mkdirp';
+import Logger from '../../lib/logger';
+import { extract, getPackData } from '../pack';
+import { _downloadSingle } from '../download';
+import { join } from 'path';
+
+// Exercises the pack helpers: extracting a plugin archive and reading
+// plugin metadata (name/folder/version/platform) out of the package.json
+// files found under the archive's kibana/ directory.
+describe('kibana cli', function () {
+
+ describe('pack', function () {
+
+ const testWorkingPath = join(__dirname, '.test.data');
+ const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
+ const testPluginPath = join(testWorkingPath, '.installedPlugins');
+ let logger;
+
+ const settings = {
+ workingPath: testWorkingPath,
+ tempArchiveFile: tempArchiveFilePath,
+ pluginDir: testPluginPath,
+ plugin: 'test-plugin'
+ };
+
+ beforeEach(function () {
+ logger = new Logger(settings);
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ // Start every test from an empty scratch directory.
+ rimraf.sync(testWorkingPath);
+ mkdirp.sync(testWorkingPath);
+ });
+
+ afterEach(function () {
+ logger.log.restore();
+ logger.error.restore();
+ rimraf.sync(testWorkingPath);
+ });
+
+ // Copies a fixture archive from __tests__/replies into the working
+ // directory by running it through the real file:// downloader.
+ function copyReplyFile(filename) {
+ const filePath = join(__dirname, 'replies', filename);
+ const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');
+
+ return _downloadSingle(settings, logger, sourceUrl);
+ }
+
+ // Fulfillment handler for promises that are expected to reject.
+ function shouldReject() {
+ throw new Error('expected the promise to reject');
+ }
+
+ describe('extract', function () {
+
+ //Also only extracts the content from the kibana folder.
+ //Ignores the others.
+ it('successfully extract a valid zip', function () {
+ return copyReplyFile('test_plugin.zip')
+ .then(() => {
+ return getPackData(settings, logger);
+ })
+ .then(() => {
+ return extract(settings, logger);
+ })
+ .then(() => {
+ const files = glob.sync('**/*', { cwd: testWorkingPath });
+ const expected = [
+ 'archive.part',
+ 'README.md',
+ 'index.js',
+ 'package.json',
+ 'public',
+ 'public/app.js',
+ 'extra file only in zip.txt'
+ ];
+ expect(files.sort()).to.eql(expected.sort());
+ });
+ });
+
+ });
+
+ describe('getPackData', function () {
+
+ it('populate settings.plugins', function () {
+ return copyReplyFile('test_plugin.zip')
+ .then(() => {
+ return getPackData(settings, logger);
+ })
+ .then(() => {
+ expect(settings.plugins[0].name).to.be('test-plugin');
+ expect(settings.plugins[0].folder).to.be('test-plugin');
+ expect(settings.plugins[0].version).to.be('1.0.0');
+ expect(settings.plugins[0].platform).to.be(undefined);
+ });
+ });
+
+ // One entry per package.json in the archive; platform-specific folders
+ // (e.g. pdf-linux) populate the platform field.
+ it('populate settings.plugins with multiple plugins', function () {
+ return copyReplyFile('test_plugin_many.zip')
+ .then(() => {
+ return getPackData(settings, logger);
+ })
+ .then(() => {
+ expect(settings.plugins[0].name).to.be('funger-plugin');
+ expect(settings.plugins[0].file).to.be('kibana/funger-plugin/package.json');
+ expect(settings.plugins[0].folder).to.be('funger-plugin');
+ expect(settings.plugins[0].version).to.be('1.0.0');
+ expect(settings.plugins[0].platform).to.be(undefined);
+
+ expect(settings.plugins[1].name).to.be('pdf');
+ expect(settings.plugins[1].file).to.be('kibana/pdf-linux/package.json');
+ expect(settings.plugins[1].folder).to.be('pdf-linux');
+ expect(settings.plugins[1].version).to.be('1.0.0');
+ expect(settings.plugins[1].platform).to.be('linux');
+
+ expect(settings.plugins[2].name).to.be('pdf');
+ expect(settings.plugins[2].file).to.be('kibana/pdf-win32/package.json');
+ expect(settings.plugins[2].folder).to.be('pdf-win32');
+ expect(settings.plugins[2].version).to.be('1.0.0');
+ expect(settings.plugins[2].platform).to.be('win32');
+
+ expect(settings.plugins[3].name).to.be('pdf');
+ expect(settings.plugins[3].file).to.be('kibana/pdf-win64/package.json');
+ expect(settings.plugins[3].folder).to.be('pdf-win64');
+ expect(settings.plugins[3].version).to.be('1.0.0');
+ expect(settings.plugins[3].platform).to.be('win64');
+
+ expect(settings.plugins[4].name).to.be('pdf');
+ expect(settings.plugins[4].file).to.be('kibana/pdf/package.json');
+ expect(settings.plugins[4].folder).to.be('pdf');
+ expect(settings.plugins[4].version).to.be('1.0.0');
+ expect(settings.plugins[4].platform).to.be(undefined);
+
+ expect(settings.plugins[5].name).to.be('test-plugin');
+ expect(settings.plugins[5].file).to.be('kibana/test-plugin/package.json');
+ expect(settings.plugins[5].folder).to.be('test-plugin');
+ expect(settings.plugins[5].version).to.be('1.0.0');
+ expect(settings.plugins[5].platform).to.be(undefined);
+ });
+ });
+
+ it('throw an error if there is no kibana plugin', function () {
+ return copyReplyFile('test_plugin_no_kibana.zip')
+ .then((data) => {
+ return getPackData(settings, logger);
+ })
+ .then(shouldReject, (err) => {
+ expect(err.message).to.match(/No kibana plugins found in archive/i);
+ });
+ });
+
+ it('throw an error with a corrupt zip', function () {
+ return copyReplyFile('corrupt.zip')
+ .then((data) => {
+ return getPackData(settings, logger);
+ })
+ .then(shouldReject, (err) => {
+ expect(err.message).to.match(/error retrieving/i);
+ });
+ });
+
+ it('throw an error if there an invalid plugin name', function () {
+ return copyReplyFile('invalid_name.zip')
+ .then((data) => {
+ return getPackData(settings, logger);
+ })
+ .then(shouldReject, (err) => {
+ expect(err.message).to.match(/invalid plugin name/i);
+ });
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/__tests__/progress.js b/src/cli_plugin/install/__tests__/progress.js
new file mode 100644
index 0000000000000..15f4fd9a1bc18
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/progress.js
@@ -0,0 +1,95 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import Progress from '../progress';
+import Logger from '../../lib/logger';
+
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ // Progress prints one "transfer" line, then one dot per started 5% of
+ // cumulative completion (20 dots max), then one "complete" line.
+ describe('progressReporter', function () {
+ let logger;
+ let progress;
+ let request; // NOTE(review): unused — candidate for removal
+
+ beforeEach(function () {
+ logger = new Logger({ silent: false, quiet: false });
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ progress = new Progress(logger);
+ });
+
+ afterEach(function () {
+ logger.log.restore();
+ logger.error.restore();
+ });
+
+ describe('handleData', function () {
+
+ it('should show a max of 20 dots for full progress', function () {
+ progress.init(1000);
+ progress.progress(1000);
+ progress.complete();
+
+ // 1 transfer line + 20 dots + 1 complete line.
+ expect(logger.log.callCount).to.be(22);
+ expect(logger.log.getCall(0).args[0]).to.match(/transfer/i);
+ expect(logger.log.getCall(1).args[0]).to.be('.');
+ expect(logger.log.getCall(2).args[0]).to.be('.');
+ expect(logger.log.getCall(3).args[0]).to.be('.');
+ expect(logger.log.getCall(4).args[0]).to.be('.');
+ expect(logger.log.getCall(5).args[0]).to.be('.');
+ expect(logger.log.getCall(6).args[0]).to.be('.');
+ expect(logger.log.getCall(7).args[0]).to.be('.');
+ expect(logger.log.getCall(8).args[0]).to.be('.');
+ expect(logger.log.getCall(9).args[0]).to.be('.');
+ expect(logger.log.getCall(10).args[0]).to.be('.');
+ expect(logger.log.getCall(11).args[0]).to.be('.');
+ expect(logger.log.getCall(12).args[0]).to.be('.');
+ expect(logger.log.getCall(13).args[0]).to.be('.');
+ expect(logger.log.getCall(14).args[0]).to.be('.');
+ expect(logger.log.getCall(15).args[0]).to.be('.');
+ expect(logger.log.getCall(16).args[0]).to.be('.');
+ expect(logger.log.getCall(17).args[0]).to.be('.');
+ expect(logger.log.getCall(18).args[0]).to.be('.');
+ expect(logger.log.getCall(19).args[0]).to.be('.');
+ expect(logger.log.getCall(20).args[0]).to.be('.');
+ expect(logger.log.getCall(21).args[0]).to.match(/complete/i);
+ });
+
+ // Percentages below are cumulative across progress() calls.
+ it('should show dot for each 5% of completion', function () {
+ progress.init(1000);
+ expect(logger.log.callCount).to.be(1);
+
+ progress.progress(50); //5%
+ expect(logger.log.callCount).to.be(2);
+
+ progress.progress(100); //15%
+ expect(logger.log.callCount).to.be(4);
+
+ progress.progress(200); //35% (cumulative 350/1000; was mislabeled 25%)
+ expect(logger.log.callCount).to.be(8);
+
+ progress.progress(590); //94%
+ expect(logger.log.callCount).to.be(20);
+
+ progress.progress(60); //100%
+ expect(logger.log.callCount).to.be(21);
+
+ //Any progress over 100% should be ignored.
+ progress.progress(9999);
+ expect(logger.log.callCount).to.be(21);
+
+ progress.complete();
+ expect(logger.log.callCount).to.be(22);
+
+ expect(logger.log.getCall(0).args[0]).to.match(/transfer/i);
+ expect(logger.log.getCall(21).args[0]).to.match(/complete/i);
+ });
+
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/__tests__/replies/banana.jpg b/src/cli_plugin/install/__tests__/replies/banana.jpg
new file mode 100644
index 0000000000000..a5b15a5fc1e92
Binary files /dev/null and b/src/cli_plugin/install/__tests__/replies/banana.jpg differ
diff --git a/src/cli_plugin/install/__tests__/replies/corrupt.zip b/src/cli_plugin/install/__tests__/replies/corrupt.zip
new file mode 100644
index 0000000000000..7fbdeef66dc3e
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/replies/corrupt.zip
@@ -0,0 +1,97 @@
+504b 0304 1400 0000 0000 d575 7147 0000
+0000 0000 0000 0000 0000 1300 0000 7465
+7374 2d70 6c75 6769 6e2d 6d61 7374 6572
+2f50 4b03 040a 0000 0000 00f2 63d8 46a5
+06bf 880c 0000 000c 0000 001d 0000 0074
+6573 742d 706c 7567 696e 2d6d 6173 7465
+722f 2e67 6974 6967 6e6f 7265 6e6f 6465
+5f6d 6f64 756c 6573 504b 0304 1400 0000
+0800 f263 d846 38c6 e53d 9d00 0000 ee00
+0000 1b00 0000 7465 7374 2d70 6c75 6769
+6e2d 6d61 7374 6572 2f69 6e64 6578 2e6a
+733d 8cc1 0e82 3010 44ef 7cc5 de80 4469
+133d 413c f807 1efc 8182 ab36 96ed 06b6
+d1c4 f0ef 16a8 cc61 9399 7d33 bdbf 0587
+157e d80f 32c2 09ee 813a b19e a078 d9d6
+9029 e19b 010c 2861 2020 7cc3 1a57 1717
+1e96 8af8 8c4a f57a 6617 19e6 c524 8915
+8735 e457 1c05 d626 9c99 f3dd 46d8 ce53
+049e 225c 2bc5 ce74 d89a 9855 84a2 8e5a
+ab83 d611 dff8 ded8 99e7 656b 5412 87f7
+ab51 260e 276e cafe 772a 9b6c 6a7e 504b
+0304 1400 0000 0800 f263 d846 5c85 06c2
+0901 0000 dc01 0000 1f00 0000 7465 7374
+2d70 6c75 6769 6e2d 6d61 7374 6572 2f70
+6163 6b61 6765 2e6a 736f 6e5d 90cd 6ec3
+2010 84ef 790a ea4b 5a29 218e dd46 6a6e
+7d8f a812 c62b 1b97 0262 975a 5695 772f
+60e7 a7e1 c67c bb33 03bf 2bc6 0a23 bea1
+38b2 8200 69eb 74e8 9429 3609 fc80 4765
+4d62 7b5e f272 565b 40e9 95a3 850c 0189
+0996 96d9 fdb2 0767 5191 f553 9c4a 3951
+a3c9 e5a4 4e51 1cca 52f0 3a29 3d91 3bee
+7623 3471 0778 1a88 fc9c 9de6 38bc d944
+6352 0649 e8bc 6b6c 0b6c 0b6c 2dad 41ab
+816b db3d 9f8a 78eb bca0 a045 aa8a 1b36
+d9c0 466b 9efe 9f53 f1b2 ce59 cbe3 1c98
+168c 5470 17d8 e800 8df2 6d4a fbac f83b
+afcb 4b7f d022 9691 7cc0 0cf7 bce2 8f0c
+4178 d967 fcc6 cb1b 1eac cae2 81bf f2fa
+226a db0a 9c87 eb18 74d5 470f f26b f138
+448f 6b63 ad24 18cc dffa e184 ec61 5b25
+7c5e fd01 504b 0304 1400 0000 0000 d575
+7147 0000 0000 0000 0000 0000 0000 1a00
+0000 7465 7374 2d70 6c75 6769 6e2d 6d61
+7374 6572 2f70 7562 6c69 632f 504b 0304
+1400 0000 0800 f263 d846 674a 6865 4a00
+0000 4e00 0000 2000 0000 7465 7374 2d70
+6c75 6769 6e2d 6d61 7374 6572 2f70 7562
+6c69 632f 6170 702e 6a73 05c1 c10d 8020
+1004 c0bf 55ac 2fa1 062b f169 6091 4bc8
+a178 e7c7 d8bb 3399 4594 a1b8 2693 ae08
+8397 cb60 c43b 017b e3b0 b06c dd51 f787
+104d cd33 33ac 12c6 db70 363f 44e7 25ae
+d317 d71f 504b 0304 0a00 0000 0000 f263
+d846 ac2f 0f2b 1200 0000 1200 0000 1c00
+0000 7465 7374 2d70 6c75 6769 6e2d 6d61
+7374 6572 2f52 4541 444d 452e 6d64 4920
+616d 2061 2074 6573 7420 706c 7567 696e
+504b 0304 1400 0000 0000 4b7e 7147 0000
+0000 0000 0000 0000 0000 2d00 0000 7465
+7374 2d70 6c75 6769 6e2d 6d61 7374 6572
+2f65 7874 7261 2066 696c 6520 6f6e 6c79
+2069 6e20 7a69 702e 7478 7450 4b01 0214
+0014 0000 0000 00d5 7571 4700 0000 0000
+0000 0000 0000 0013 0024 0000 0000 0000
+0010 0000 0000 0000 0074 6573 742d 706c
+7567 696e 2d6d 6173 7465 722f 0a00 2000
+0000 0000 0100 1800 4634 e20f 7921 d101
+4634 e20f 7921 d101 d449 e10f 7921 d101
+504b 0102 1400 0a00 0000 0000 f263 d846
+a506 bf88 0c00 0000 0c00 0000 1d00 2400
+0000 0000 0000 2000 0000 3100 0000 7465
+7374 2d70 6c75 6769 6e2d 6d61 7374 6572
+2f2e 6769 7469 676e 6f72 650a 0020 0000
+0000 0001 0018 0000 f483 00ac aed0 0179
+98e1 0f79 21d1 017
+0000 0008 00f2 63d8 4667 4a68 654a 0000
+004e 0000 0020 0024 0000 0000 0000 0020
+0000 00cc 0200 0074 6573 742d 706c 7567
+696e 2d6d 6173 7465 722f 7075 626c 6963
+2f61 7070 2e6a 730a 0020 0000 0000 0001
+0018 0000 f483 00ac aed0 015b 5be2 0f79
+21d1 015b 5be2 0f79 21d1 0150 4b01 0214
+000a 0000 0000 00f2 63d8 46ac 2f0f 2b12
+0000 0012 0000 001c 0024 0000 0000 0000
+0020 0000 0054 0300 0074 6573 742d 706c
+7567 696e 2d6d 6173 7465 722f 5245 4144
+4d45 2e6d 640a 0020 0000 0000 0001 0018
+0000 f483 00ac aed0 014e 0de2 0f79 21d1
+014e 0de2 0f79 21d1 0150 4b01 0214 0014
+0000 0000 004b 7e71 4700 0000 0000 0000
+0000 0000 002d 0000 0000 0000 0000 0020
+0000 00a0 0300 0074 6573 742d 706c 7567
+696e 2d6d 6173 7465 722f 6578 7472 6120
+6669 6c65 206f 6e6c 7920 696e 207a 6970
+2e74 7874 504b 0506 0000 0000 0800 0800
+5903 0000 eb03 0000 0000
diff --git a/src/cli_plugin/install/__tests__/replies/invalid_name.zip b/src/cli_plugin/install/__tests__/replies/invalid_name.zip
new file mode 100644
index 0000000000000..5de9a0677b6cb
Binary files /dev/null and b/src/cli_plugin/install/__tests__/replies/invalid_name.zip differ
diff --git a/src/cli_plugin/install/__tests__/replies/package.no_version.json b/src/cli_plugin/install/__tests__/replies/package.no_version.json
new file mode 100644
index 0000000000000..874c3e5a04601
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/replies/package.no_version.json
@@ -0,0 +1,3 @@
+{
+ "name": "test-plugin"
+}
diff --git a/src/cli_plugin/install/__tests__/replies/test_plugin.zip b/src/cli_plugin/install/__tests__/replies/test_plugin.zip
new file mode 100644
index 0000000000000..1ec957f80128b
Binary files /dev/null and b/src/cli_plugin/install/__tests__/replies/test_plugin.zip differ
diff --git a/src/cli_plugin/install/__tests__/replies/test_plugin_many.zip b/src/cli_plugin/install/__tests__/replies/test_plugin_many.zip
new file mode 100644
index 0000000000000..e4fc8d73feef8
Binary files /dev/null and b/src/cli_plugin/install/__tests__/replies/test_plugin_many.zip differ
diff --git a/src/cli_plugin/install/__tests__/replies/test_plugin_no_kibana.zip b/src/cli_plugin/install/__tests__/replies/test_plugin_no_kibana.zip
new file mode 100644
index 0000000000000..d460c65978c63
Binary files /dev/null and b/src/cli_plugin/install/__tests__/replies/test_plugin_no_kibana.zip differ
diff --git a/src/cli_plugin/install/__tests__/settings.js b/src/cli_plugin/install/__tests__/settings.js
new file mode 100644
index 0000000000000..0dc90ec989f94
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/settings.js
@@ -0,0 +1,228 @@
+import path from 'path';
+import expect from 'expect.js';
+import { fromRoot } from '../../../utils';
+import { resolve } from 'path';
+import { parseMilliseconds, parse, getPlatform } from '../settings';
+
+// Tests for settings.js: parseMilliseconds (duration strings) and parse()
+// (turning commander command/options into the installer settings object).
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ describe('command line option parsing', function () {
+
+ describe('parseMilliseconds function', function () {
+
+ it('should return 0 for an empty string', function () {
+ const value = '';
+ const result = parseMilliseconds(value);
+
+ expect(result).to.be(0);
+ });
+
+ it('should return 0 for a number with an invalid unit of measure', function () {
+ const result = parseMilliseconds('1gigablasts');
+ expect(result).to.be(0);
+ });
+
+ it('should assume a number with no unit of measure is specified as milliseconds', function () {
+ const result = parseMilliseconds(1);
+ expect(result).to.be(1);
+
+ const result2 = parseMilliseconds('1');
+ expect(result2).to.be(1);
+ });
+
+ it('should interpret a number with "s" as the unit of measure as seconds', function () {
+ const result = parseMilliseconds('5s');
+ expect(result).to.be(5 * 1000);
+ });
+
+ it('should interpret a number with "second" as the unit of measure as seconds', function () {
+ const result = parseMilliseconds('5second');
+ expect(result).to.be(5 * 1000);
+ });
+
+ it('should interpret a number with "seconds" as the unit of measure as seconds', function () {
+ const result = parseMilliseconds('5seconds');
+ expect(result).to.be(5 * 1000);
+ });
+
+ it('should interpret a number with "m" as the unit of measure as minutes', function () {
+ const result = parseMilliseconds('9m');
+ expect(result).to.be(9 * 1000 * 60);
+ });
+
+ it('should interpret a number with "minute" as the unit of measure as minutes', function () {
+ const result = parseMilliseconds('9minute');
+ expect(result).to.be(9 * 1000 * 60);
+ });
+
+ it('should interpret a number with "minutes" as the unit of measure as minutes', function () {
+ const result = parseMilliseconds('9minutes');
+ expect(result).to.be(9 * 1000 * 60);
+ });
+
+ });
+
+ describe('parse function', function () {
+
+ const command = 'plugin name';
+ let options = {};
+ const kbnPackage = { version: 1234 };
+ beforeEach(function () {
+ // Rebuild options before every test since individual tests mutate it.
+ options = { pluginDir: fromRoot('installedPlugins') };
+ });
+
+ describe('timeout option', function () {
+
+ it('should default to 0 (milliseconds)', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.timeout).to.be(0);
+ });
+
+ it('should set settings.timeout property', function () {
+ options.timeout = 1234;
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.timeout).to.be(1234);
+ });
+
+ });
+
+ describe('quiet option', function () {
+
+ it('should default to false', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.quiet).to.be(false);
+ });
+
+ it('should set settings.quiet property to true', function () {
+ options.quiet = true;
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.quiet).to.be(true);
+ });
+
+ });
+
+ describe('silent option', function () {
+
+ it('should default to false', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.silent).to.be(false);
+ });
+
+ it('should set settings.silent property to true', function () {
+ options.silent = true;
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.silent).to.be(true);
+ });
+
+ });
+
+ describe('config option', function () {
+
+ // ZLS = zero-length string.
+ it('should default to ZLS', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.config).to.be('');
+ });
+
+ it('should set settings.config property', function () {
+ options.config = 'foo bar baz';
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.config).to.be('foo bar baz');
+ });
+
+ });
+
+ describe('pluginDir option', function () {
+
+ it('should default to installedPlugins', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.pluginDir).to.be(fromRoot('installedPlugins'));
+ });
+
+ it('should set settings.config property', function () {
+ options.pluginDir = 'foo bar baz';
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.pluginDir).to.be('foo bar baz');
+ });
+
+ });
+
+ describe('command value', function () {
+
+ it('should set settings.plugin property', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.plugin).to.be(command);
+ });
+
+ });
+
+ describe('urls collection', function () {
+
+ // The raw command is tried first (it may itself be a url/path), then
+ // the default elastic.co download url built from the Kibana version.
+ it('should populate the settings.urls property', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ const expected = [
+ command,
+ `https://download.elastic.co/kibana/${command}/${command}-1234.zip`
+ ];
+
+ expect(settings.urls).to.eql(expected);
+ });
+
+ });
+
+ describe('workingPath value', function () {
+
+ it('should set settings.workingPath property', function () {
+ options.pluginDir = 'foo/bar/baz';
+ const settings = parse(command, options, kbnPackage);
+ const expected = resolve('foo/bar/baz', '.plugin.installing');
+
+ expect(settings.workingPath).to.be(expected);
+ });
+
+ });
+
+ describe('tempArchiveFile value', function () {
+
+ it('should set settings.tempArchiveFile property', function () {
+ options.pluginDir = 'foo/bar/baz';
+ const settings = parse(command, options, kbnPackage);
+ const expected = resolve('foo/bar/baz', '.plugin.installing', 'archive.part');
+
+ expect(settings.tempArchiveFile).to.be(expected);
+ });
+
+ });
+
+ describe('tempPackageFile value', function () {
+
+ it('should set settings.tempPackageFile property', function () {
+ options.pluginDir = 'foo/bar/baz';
+ const settings = parse(command, options, kbnPackage);
+ const expected = resolve('foo/bar/baz', '.plugin.installing', 'package.json');
+
+ expect(settings.tempPackageFile).to.be(expected);
+ });
+
+ });
+
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/__tests__/zip.js b/src/cli_plugin/install/__tests__/zip.js
new file mode 100644
index 0000000000000..7cc96fd318c9b
--- /dev/null
+++ b/src/cli_plugin/install/__tests__/zip.js
@@ -0,0 +1,145 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import glob from 'glob-all';
+import rimraf from 'rimraf';
+import mkdirp from 'mkdirp';
+import Logger from '../../lib/logger';
+import { _downloadSingle } from '../download';
+import { join } from 'path';
+import { listFiles, extractFiles } from '../zip';
+
+// Tests for zip.js: listing archive entries and extracting subsets of them
+// using the `files` and `paths` filters.
+describe('kibana cli', function () {
+
+ describe('zip', function () {
+
+ const testWorkingPath = join(__dirname, '.test.data');
+ const tempArchiveFilePath = join(testWorkingPath, 'archive.part');
+ let logger;
+
+ const settings = {
+ workingPath: testWorkingPath,
+ tempArchiveFile: tempArchiveFilePath,
+ plugin: 'test-plugin',
+ setPlugin: function (plugin) {} // stubbed per-test below
+ };
+
+ // Fulfillment handler for promises that are expected to reject.
+ function shouldReject() {
+ throw new Error('expected the promise to reject');
+ }
+
+ beforeEach(function () {
+ logger = new Logger(settings);
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ sinon.stub(settings, 'setPlugin');
+ // Start every test from an empty scratch directory.
+ rimraf.sync(testWorkingPath);
+ mkdirp.sync(testWorkingPath);
+ });
+
+ afterEach(function () {
+ logger.log.restore();
+ logger.error.restore();
+ settings.setPlugin.restore();
+ rimraf.sync(testWorkingPath);
+ });
+
+ // Copies a fixture archive from __tests__/replies into the working
+ // directory by running it through the real file:// downloader.
+ function copyReplyFile(filename) {
+ const filePath = join(__dirname, 'replies', filename);
+ const sourceUrl = 'file://' + filePath.replace(/\\/g, '/');
+
+ return _downloadSingle(settings, logger, sourceUrl);
+ }
+
+ describe('listFiles', function () {
+
+ it('lists the files in the zip', function () {
+ return copyReplyFile('test_plugin.zip')
+ .then(() => {
+ return listFiles(settings.tempArchiveFile);
+ })
+ .then((actual) => {
+ const expected = [
+ 'elasticsearch/',
+ 'kibana/',
+ 'kibana/test-plugin/',
+ 'kibana/test-plugin/.gitignore',
+ 'kibana/test-plugin/extra file only in zip.txt',
+ 'kibana/test-plugin/index.js',
+ 'kibana/test-plugin/package.json',
+ 'kibana/test-plugin/public/',
+ 'kibana/test-plugin/public/app.js',
+ 'kibana/test-plugin/README.md',
+ 'logstash/'
+ ];
+
+ expect(actual).to.eql(expected);
+ });
+ });
+
+ });
+
+ describe('extractFiles', function () {
+
+ it('extracts files using the files filter', function () {
+ return copyReplyFile('test_plugin_many.zip')
+ .then(() => {
+ const filter = {
+ files: [
+ 'kibana/funger-plugin/extra file only in zip.txt',
+ 'kibana/funger-plugin/index.js',
+ // Windows-style separators on purpose — apparently verifying the
+ // filter normalizes path separators (confirm against zip.js).
+ 'kibana\\funger-plugin\\package.json'
+ ]
+ };
+
+ return extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
+ })
+ .then(() => {
+ const files = glob.sync('**/*', { cwd: testWorkingPath });
+ const expected = [
+ 'kibana',
+ 'kibana/funger-plugin',
+ 'kibana/funger-plugin/extra file only in zip.txt',
+ 'kibana/funger-plugin/index.js',
+ 'kibana/funger-plugin/package.json',
+ 'archive.part'
+ ];
+ expect(files.sort()).to.eql(expected.sort());
+ });
+ });
+
+ it('extracts files using the paths filter', function () {
+ return copyReplyFile('test_plugin_many.zip')
+ .then(() => {
+ const filter = {
+ paths: [
+ 'kibana/funger-plugin',
+ 'kibana/test-plugin/public'
+ ]
+ };
+
+ return extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
+ })
+ .then(() => {
+ const files = glob.sync('**/*', { cwd: testWorkingPath });
+ const expected = [
+ 'archive.part',
+ 'kibana',
+ 'kibana/funger-plugin',
+ 'kibana/funger-plugin/README.md',
+ 'kibana/funger-plugin/extra file only in zip.txt',
+ 'kibana/funger-plugin/index.js',
+ 'kibana/funger-plugin/package.json',
+ 'kibana/funger-plugin/public',
+ 'kibana/funger-plugin/public/app.js',
+ 'kibana/test-plugin',
+ 'kibana/test-plugin/public',
+ 'kibana/test-plugin/public/app.js'
+ ];
+ expect(files.sort()).to.eql(expected.sort());
+ });
+ });
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/install/cleanup.js b/src/cli_plugin/install/cleanup.js
new file mode 100644
index 0000000000000..5e8f6fec58b30
--- /dev/null
+++ b/src/cli_plugin/install/cleanup.js
@@ -0,0 +1,32 @@
+import rimraf from 'rimraf';
+import fs from 'fs';
+
/**
 * Removes the remains of a previous (failed) install attempt.
 *
 * Resolves once the working directory is known to be absent; rejects when
 * the directory exists but cannot be deleted, or cannot be inspected for a
 * reason other than ENOENT.
 *
 * BUG FIX: the original fell through to resolve() after calling reject()
 * (both on rimraf failure and on non-ENOENT stat errors); benign only
 * because promises settle once, but execution continued past the reject.
 *
 * @param {Object} settings - installer settings; only workingPath is used.
 * @param {Object} logger - logger with a log() method.
 * @returns {Promise<void>}
 */
export function cleanPrevious(settings, logger) {
  return new Promise(function (resolve, reject) {
    try {
      fs.statSync(settings.workingPath);
    } catch (e) {
      // ENOENT simply means there is nothing to clean up.
      if (e.code !== 'ENOENT') return reject(e);
      return resolve();
    }

    logger.log('Found previous install attempt. Deleting...');
    try {
      rimraf.sync(settings.workingPath);
    } catch (e) {
      return reject(e);
    }
    resolve();
  });
}
+
/**
 * Best-effort removal of everything a failed install may have left behind:
 * the working directory and the (possibly half-renamed) plugin directory.
 * At this point we are already bailing out, so deletion errors are ignored.
 *
 * @param {Object} settings - uses workingPath and plugins[0].path.
 */
export function cleanArtifacts(settings) {
  const { workingPath, plugins } = settings;
  try {
    rimraf.sync(workingPath);
    rimraf.sync(plugins[0].path);
  }
  catch (e) {} // eslint-disable-line no-empty
}
diff --git a/src/cli_plugin/install/download.js b/src/cli_plugin/install/download.js
new file mode 100644
index 0000000000000..871b170628fe8
--- /dev/null
+++ b/src/cli_plugin/install/download.js
@@ -0,0 +1,45 @@
+import downloadHttpFile from './downloaders/http';
+import downloadLocalFile from './downloaders/file';
+import { UnsupportedProtocolError } from '../lib/errors';
+import { parse } from 'url';
+
/**
 * Transfers a single url to settings.tempArchiveFile, choosing the
 * downloader that matches the url's protocol.
 *
 * @param {Object} settings - uses tempArchiveFile and timeout.
 * @param {Object} logger - passed through to the downloader.
 * @param {string} sourceUrl - file://, http:// or https:// url.
 * @returns {Promise} resolves when the transfer completes; rejects with
 *   UnsupportedProtocolError for any other protocol.
 */
export function _downloadSingle(settings, logger, sourceUrl) {
  const { protocol, path } = parse(sourceUrl);

  if (/^file/.test(protocol)) {
    // Local archive: copy it straight off the file system.
    return downloadLocalFile(logger, decodeURI(path), settings.tempArchiveFile);
  }

  if (/^https?/.test(protocol)) {
    return downloadHttpFile(logger, sourceUrl, settings.tempArchiveFile, settings.timeout);
  }

  return Promise.reject(new UnsupportedProtocolError());
}
+
//Attempts to download each url in turn until one is successful
export function download(settings, logger) {
  const remaining = settings.urls.slice(0);

  function tryNext() {
    const sourceUrl = remaining.shift();
    if (!sourceUrl) {
      // Every candidate has been exhausted (or none were supplied).
      throw new Error('No valid url specified.');
    }

    logger.log(`Attempting to transfer from ${sourceUrl}`);

    return _downloadSingle(settings, logger, sourceUrl).catch((err) => {
      // A url we cannot handle (bad protocol) or whose resource is missing
      // is not fatal — fall through to the next candidate. Anything else
      // (e.g. a write failure) aborts the whole download.
      const recoverable =
        err instanceof UnsupportedProtocolError || err.message === 'ENOTFOUND';
      if (recoverable) {
        return tryNext();
      }
      throw err;
    });
  }

  return tryNext();
}
diff --git a/src/cli_plugin/install/downloaders/file.js b/src/cli_plugin/install/downloaders/file.js
new file mode 100644
index 0000000000000..505a103755e66
--- /dev/null
+++ b/src/cli_plugin/install/downloaders/file.js
@@ -0,0 +1,63 @@
+import Progress from '../progress';
+import { createWriteStream, createReadStream, unlinkSync, statSync } from 'fs';
+
+function openSourceFile({ sourcePath }) {
+ try {
+ let fileInfo = statSync(sourcePath);
+
+ const readStream = createReadStream(sourcePath);
+
+ return { readStream, fileInfo };
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ throw new Error('ENOTFOUND');
+ }
+
+ throw err;
+ }
+}
+
/**
 * Pipes readStream into writeStream, reporting the byte length of every
 * chunk to the progress reporter.
 *
 * Resolves when the write side finishes; rejects on the first error
 * emitted by either stream.
 */
function copyFile({ readStream, writeStream, progress }) {
  return new Promise((resolve, reject) => {
    // Fail quickly if either end of the pipe errors out.
    readStream.on('error', reject);
    writeStream.on('error', reject);

    // We are done once the data has been flushed to the file system.
    writeStream.on('finish', resolve);

    readStream.on('data', (chunk) => progress.progress(chunk.length));
    readStream.pipe(writeStream);
  });
}
+
+/*
+// Responsible for managing local file transfers: copies the archive at
+// sourcePath to targetPath, reporting progress through `logger`.
+*/
+export default async function copyLocalFile(logger, sourcePath, targetPath) {
+ try {
+ const { readStream, fileInfo } = openSourceFile({ sourcePath });
+ const writeStream = createWriteStream(targetPath);
+
+ try {
+ // The source size lets Progress scale its dot output.
+ const progress = new Progress(logger);
+ progress.init(fileInfo.size);
+
+ await copyFile({ readStream, writeStream, progress });
+
+ progress.complete();
+ } catch (err) {
+ // Transfer failed midway: close both ends so no descriptors leak,
+ // then let the outer handler log and rethrow.
+ readStream.close();
+ writeStream.close();
+ throw err;
+ }
+ } catch (err) {
+ logger.error(err);
+ throw err;
+ }
+};
diff --git a/src/cli_plugin/install/downloaders/http.js b/src/cli_plugin/install/downloaders/http.js
new file mode 100644
index 0000000000000..40069c4cd063e
--- /dev/null
+++ b/src/cli_plugin/install/downloaders/http.js
@@ -0,0 +1,73 @@
+import Wreck from 'wreck';
+import Progress from '../progress';
+import { fromNode as fn } from 'bluebird';
+import { createWriteStream, unlinkSync } from 'fs';
+
/**
 * Issues a GET request for the archive and resolves with the live
 * { req, resp } pair once response headers have arrived.
 *
 * Connection refusals and HTTP status codes >= 400 are normalized to the
 * 'ENOTFOUND' sentinel error understood by the url retry logic.
 *
 * @param {{sourceUrl: string, timeout: number}} param0
 * @returns {Promise<{req: Object, resp: Object}>}
 */
function sendRequest({ sourceUrl, timeout }) {
  const maxRedirects = 11; //Because this one goes to 11.
  return fn((cb) => {
    const options = { timeout, redirects: maxRedirects };
    const req = Wreck.request('GET', sourceUrl, options, (err, resp) => {
      if (err) {
        const normalized = err.code === 'ECONNREFUSED' ? new Error('ENOTFOUND') : err;
        return cb(normalized);
      }

      if (resp.statusCode >= 400) {
        return cb(new Error('ENOTFOUND'));
      }

      cb(null, { req, resp });
    });
  });
}
+
/**
 * Streams an http response body into targetPath, reporting the byte length
 * of every chunk to the progress reporter.
 *
 * Resolves when the file is fully written; rejects on the first error from
 * either the response or the file stream.
 */
function downloadResponse({ resp, targetPath, progress }) {
  return new Promise((resolve, reject) => {
    const writeStream = createWriteStream(targetPath);

    // Fail quickly if either end of the pipe errors out.
    resp.on('error', reject);
    writeStream.on('error', reject);

    // Done once the data has been flushed to the file system.
    writeStream.on('finish', resolve);

    resp.on('data', (chunk) => progress.progress(chunk.length));
    resp.pipe(writeStream);
  });
}
+
/*
Responsible for managing http transfers: downloads sourceUrl to targetPath,
aborting the request if the body transfer fails midway.
*/
export default async function downloadUrl(logger, sourceUrl, targetPath, timeout) {
  try {
    const { req, resp } = await sendRequest({ sourceUrl, timeout });

    try {
      // content-length may be absent; Progress is then initialized with 0.
      const contentLength = parseFloat(resp.headers['content-length']);
      const progress = new Progress(logger);
      progress.init(contentLength || 0);

      await downloadResponse({ resp, targetPath, progress });

      progress.complete();
    } catch (err) {
      // The body transfer failed: kill the socket before bubbling up.
      req.abort();
      throw err;
    }
  } catch (err) {
    // 'ENOTFOUND' is expected while probing candidate urls — don't log it.
    if (err.message !== 'ENOTFOUND') {
      logger.error(err);
    }
    throw err;
  }
}
diff --git a/src/cli_plugin/install/index.js b/src/cli_plugin/install/index.js
new file mode 100644
index 0000000000000..177666bca5144
--- /dev/null
+++ b/src/cli_plugin/install/index.js
@@ -0,0 +1,47 @@
+import { fromRoot } from '../../utils';
+import install from './install';
+import Logger from '../lib/logger';
+import pkg from '../../utils/package_json';
+import { parse, parseMilliseconds } from './settings';
+
/**
 * Entry point for the `install` command: parses the raw commander options
 * into an installer settings object and hands off to install().
 * Exits with code 64 (EX_USAGE) when the arguments cannot be parsed.
 */
function processCommand(command, options) {
  let settings;
  try {
    settings = parse(command, options, pkg);
  } catch (ex) {
    //The logger has not yet been initialized.
    console.error(ex.message);
    process.exit(64); // eslint-disable-line no-process-exit
  }

  install(settings, new Logger(settings));
}
+
/**
 * Registers the `install` command and its options on the commander program.
 *
 * NOTE(review): the value placeholders (`<plugin/url>`, `<path>`,
 * `<duration>`) appear to have been stripped from the original strings by
 * markup-unaware processing (the literals ended in trailing spaces, e.g.
 * 'install '). They are restored here — without them commander would treat
 * the value-taking options as boolean flags.
 */
export default function pluginInstall(program) {
  program
    .command('install <plugin/url>')
    .option('-q, --quiet', 'disable all process messaging except errors')
    .option('-s, --silent', 'disable all process messaging')
    .option(
      '-c, --config <path>',
      'path to the config file',
      fromRoot('config/kibana.yml')
    )
    .option(
      '-t, --timeout <duration>',
      'length of time before failing; 0 for never fail',
      parseMilliseconds
    )
    .option(
      '-d, --plugin-dir <path>',
      'path to the directory where plugins are stored',
      fromRoot('installedPlugins')
    )
    .description('install a plugin',
`Common examples:
  install x-pack
  install file:///Path/to/my/x-pack.zip
  install https://path.to/my/x-pack.zip`)
    .action(processCommand);
}
diff --git a/src/cli_plugin/install/install.js b/src/cli_plugin/install/install.js
new file mode 100644
index 0000000000000..1c72827cad71c
--- /dev/null
+++ b/src/cli_plugin/install/install.js
@@ -0,0 +1,40 @@
+import { download } from './download';
+import Promise from 'bluebird';
+import { cleanPrevious, cleanArtifacts } from './cleanup';
+import { extract, getPackData } from './pack';
+import { sync as rimrafSync } from 'rimraf';
+import { renameSync } from 'fs';
+import { existingInstall, rebuildCache, assertVersion } from './kibana';
+import mkdirp from 'mkdirp';
+
+const mkdir = Promise.promisify(mkdirp);
+
+export default async function install(settings, logger) {
+ try {
+ await cleanPrevious(settings, logger);
+
+ await mkdir(settings.workingPath);
+
+ await download(settings, logger);
+
+ await getPackData(settings, logger);
+
+ await extract(settings, logger);
+
+ rimrafSync(settings.tempArchiveFile);
+
+ existingInstall(settings, logger);
+
+ assertVersion(settings);
+
+ renameSync(settings.workingPath, settings.plugins[0].path);
+
+ await rebuildCache(settings, logger);
+
+ logger.log('Plugin installation complete');
+ } catch (err) {
+ logger.error(`Plugin installation was unsuccessful due to error "${err.message}"`);
+ cleanArtifacts(settings);
+ process.exit(70); // eslint-disable-line no-process-exit
+ }
+}
diff --git a/src/cli_plugin/install/kibana.js b/src/cli_plugin/install/kibana.js
new file mode 100644
index 0000000000000..c329992837c26
--- /dev/null
+++ b/src/cli_plugin/install/kibana.js
@@ -0,0 +1,59 @@
+import _ from 'lodash';
+import { fromRoot } from '../../utils';
+import KbnServer from '../../server/kbn_server';
+import readYamlConfig from '../../cli/serve/read_yaml_config';
+import { versionSatisfies, cleanVersion } from './version';
+import { statSync } from 'fs';
+
+export function existingInstall(settings, logger) {
+ try {
+ statSync(settings.plugins[0].path);
+
+ logger.error(`Plugin ${settings.plugins[0].name} already exists, please remove before installing a new version`);
+ process.exit(70); // eslint-disable-line no-process-exit
+ } catch (e) {
+ if (e.code !== 'ENOENT') throw e;
+ }
+}
+
+export async function rebuildCache(settings, logger) {
+ logger.log('Optimizing and caching browser bundles...');
+ const serverConfig = _.merge(
+ readYamlConfig(settings.config),
+ {
+ env: 'production',
+ logging: {
+ silent: settings.silent,
+ quiet: !settings.silent,
+ verbose: false
+ },
+ optimize: {
+ useBundleCache: false
+ },
+ server: {
+ autoListen: false
+ },
+ plugins: {
+ initialize: false,
+ scanDirs: [settings.pluginDir, fromRoot('src/plugins')]
+ }
+ }
+ );
+
+ const kbnServer = new KbnServer(serverConfig);
+ await kbnServer.ready();
+ await kbnServer.close();
+}
+
+export function assertVersion(settings) {
+ if (!settings.plugins[0].version) {
+ throw new Error (`Plugin version not found. Check package.json in archive`);
+ }
+
+ const actual = cleanVersion(settings.plugins[0].version);
+ const expected = cleanVersion(settings.version);
+ if (!versionSatisfies(actual, expected)) {
+ throw new Error (`Incorrect version in plugin [${settings.plugins[0].name}]. ` +
+ `Expected [${expected}]; found [${actual}]`);
+ }
+}
diff --git a/src/cli_plugin/install/pack.js b/src/cli_plugin/install/pack.js
new file mode 100644
index 0000000000000..5df302d5a86ff
--- /dev/null
+++ b/src/cli_plugin/install/pack.js
@@ -0,0 +1,137 @@
+import _ from 'lodash';
+import { listFiles, extractFiles } from './zip';
+import { resolve } from 'path';
+import { sync as rimrafSync } from 'rimraf';
+import validate from 'validate-npm-package-name';
+
+/**
+ * Returns an array of package objects. There will be one for each of the
+ * package.json files found in the archive
+ * @param {object} settings - a plugin installer settings object
+ */
+async function listPackages(settings) {
+ const regExp = new RegExp('(kibana/([^/]+))/package.json', 'i');
+ const archiveFiles = await listFiles(settings.tempArchiveFile);
+
+ return _(archiveFiles)
+ .map(file => file.replace(/\\/g, '/'))
+ .map(file => file.match(regExp))
+ .compact()
+ .map(([ file, _, folder ]) => ({ file, folder }))
+ .uniq()
+ .value();
+}
+
+/**
+ * Extracts the package.json files into the workingPath
+ * @param {object} settings - a plugin installer settings object
+ * @param {array} packages - array of package objects from listPackages()
+ */
+async function extractPackageFiles(settings, packages) {
+ const filter = {
+ files: packages.map((pkg) => pkg.file)
+ };
+ await extractFiles(settings.tempArchiveFile, settings.workingPath, 0, filter);
+}
+
+/**
+ * Deletes the package.json files created by extractPackageFiles()
+ * @param {object} settings - a plugin installer settings object
+ */
+function deletePackageFiles(settings) {
+ const fullPath = resolve(settings.workingPath, 'kibana');
+ rimrafSync(fullPath);
+}
+
+/**
+ * Checks the plugin name. Will throw an exception if it does not meet
+ * npm package naming conventions
+ * @param {object} plugin - a package object from listPackages()
+ */
+function assertValidPackageName(plugin) {
+ const validation = validate(plugin.name);
+ if (!validation.validForNewPackages) {
+ throw new Error(`Invalid plugin name [${plugin.name}] in package.json`);
+ }
+}
+
+
+/**
+ * Examine each package.json file to determine the plugin name,
+ * version, and platform. Mutates the package objects in the packages array
+ * @param {object} settings - a plugin installer settings object
+ * @param {array} packages - array of package objects from listPackages()
+ */
+async function mergePackageData(settings, packages) {
+ return packages.map((pkg) => {
+ const fullPath = resolve(settings.workingPath, pkg.file);
+ const packageInfo = require(fullPath);
+
+ pkg.version = _.get(packageInfo, 'version');
+ pkg.name = _.get(packageInfo, 'name');
+ pkg.path = resolve(settings.pluginDir, pkg.name);
+
+ const regExp = new RegExp(`${pkg.name}-(.+)`, 'i');
+ const matches = pkg.folder.match(regExp);
+ pkg.platform = (matches) ? matches[1] : undefined;
+
+ return pkg;
+ });
+}
+
+/**
+ * Extracts the first plugin in the archive.
+ * NOTE: This will need to be changed in later versions of the pack installer
+ * that allow for the installation of more than one plugin at once.
+ * @param {object} settings - a plugin installer settings object
+ */
+async function extractArchive(settings) {
+ const filter = {
+ paths: [ `kibana/${settings.plugins[0].folder}` ]
+ };
+
+ await extractFiles(settings.tempArchiveFile, settings.workingPath, 2, filter);
+}
+
+
+/**
+ * Returns the detailed information about each kibana plugin in the pack.
+ * TODO: If there are platform specific folders, determine which one to use.
+ * @param {object} settings - a plugin installer settings object
+ * @param {object} logger - a plugin installer logger object
+ */
+export async function getPackData(settings, logger) {
+ let packages;
+ try {
+ logger.log('Retrieving metadata from plugin archive');
+
+ packages = await listPackages(settings);
+
+ await extractPackageFiles(settings, packages);
+ await mergePackageData(settings, packages);
+ await deletePackageFiles(settings);
+ } catch (err) {
+ logger.error(err);
+ throw new Error('Error retrieving metadata from plugin archive');
+ }
+
+ if (packages.length === 0) {
+ throw new Error('No kibana plugins found in archive');
+ }
+ packages.forEach(assertValidPackageName);
+
+ settings.plugins = packages;
+}
+
+export async function extract(settings, logger) {
+ try {
+ logger.log('Extracting plugin archive');
+
+ await extractArchive(settings);
+
+ logger.log('Extraction complete');
+ } catch (err) {
+ logger.error(err);
+ throw new Error('Error extracting plugin archive');
+ }
+};
diff --git a/src/cli_plugin/install/progress.js b/src/cli_plugin/install/progress.js
new file mode 100644
index 0000000000000..dca7f9a2b88fc
--- /dev/null
+++ b/src/cli_plugin/install/progress.js
@@ -0,0 +1,38 @@
+/**
+ * Generates file transfer progress messages
+ */
+export default class Progress {
+
+ constructor(logger) {
+ const self = this;
+
+ self.dotCount = 0;
+ self.runningTotal = 0;
+ self.totalSize = 0;
+ self.logger = logger;
+ }
+
+ init(size) {
+ this.totalSize = size;
+ const totalDesc = this.totalSize || 'unknown number of';
+
+ this.logger.log(`Transferring ${totalDesc} bytes`, true);
+ }
+
+ progress(size) {
+ if (!this.totalSize) return;
+
+ this.runningTotal += size;
+ let newDotCount = Math.round(this.runningTotal / this.totalSize * 100 / 5);
+ if (newDotCount > 20) newDotCount = 20;
+ for (let i = 0; i < (newDotCount - this.dotCount); i++) {
+ this.logger.log('.', true);
+ }
+ this.dotCount = newDotCount;
+ }
+
+ complete() {
+ this.logger.log(`Transfer complete`, false);
+ }
+
+}
diff --git a/src/cli_plugin/install/settings.js b/src/cli_plugin/install/settings.js
new file mode 100644
index 0000000000000..0113affe2ec91
--- /dev/null
+++ b/src/cli_plugin/install/settings.js
@@ -0,0 +1,47 @@
+import expiry from 'expiry-js';
+import { intersection } from 'lodash';
+import { resolve } from 'path';
+import { arch, platform } from 'os';
+
+function generateUrls({ version, plugin }) {
+ return [
+ plugin,
+ `https://download.elastic.co/kibana/${plugin}/${plugin}-${version}.zip`
+ ];
+}
+
+export function parseMilliseconds(val) {
+ let result;
+
+ try {
+ const timeVal = expiry(val);
+ result = timeVal.asMilliseconds();
+ } catch (ex) {
+ result = 0;
+ }
+
+ return result;
+};
+
+export function parse(command, options, kbnPackage) {
+ const settings = {
+ timeout: options.timeout || 0,
+ quiet: options.quiet || false,
+ silent: options.silent || false,
+ config: options.config || '',
+ plugin: command,
+ version: kbnPackage.version,
+ pluginDir: options.pluginDir || ''
+ };
+
+ settings.urls = generateUrls(settings);
+ settings.workingPath = resolve(settings.pluginDir, '.plugin.installing');
+ settings.tempArchiveFile = resolve(settings.workingPath, 'archive.part');
+ settings.tempPackageFile = resolve(settings.workingPath, 'package.json');
+ settings.setPlugin = function (plugin) {
+ settings.plugin = plugin;
+ settings.pluginPath = resolve(settings.pluginDir, settings.plugin.name);
+ };
+
+ return settings;
+};
diff --git a/src/cli_plugin/install/version.js b/src/cli_plugin/install/version.js
new file mode 100644
index 0000000000000..a2a37d0dd23b4
--- /dev/null
+++ b/src/cli_plugin/install/version.js
@@ -0,0 +1,15 @@
+import semver from 'semver';
+
+export function versionSatisfies(cleanActual, cleanExpected) {
+ try {
+ return (cleanActual === cleanExpected);
+ } catch (err) {
+ return false;
+ }
+}
+
+export function cleanVersion(version) {
+ const match = version.match(/\d+\.\d+\.\d+/);
+ if (!match) return version;
+ return match[0];
+}
diff --git a/src/cli_plugin/install/zip.js b/src/cli_plugin/install/zip.js
new file mode 100644
index 0000000000000..38dd82f013c4b
--- /dev/null
+++ b/src/cli_plugin/install/zip.js
@@ -0,0 +1,94 @@
+import _ from 'lodash';
+import DecompressZip from '@bigfunger/decompress-zip';
+
+const SYMBOLIC_LINK = 'SymbolicLink';
+
+/**
+ * Creates a filter function to be consumed by extractFiles that filters by
+ * an array of files
+ * @param {array} files - an array of full file paths to extract. Should match
+ * exactly a value from listFiles
+ */
+function extractFilterFromFiles(files) {
+ const filterFiles = files.map((file) => file.replace(/\\/g, '/'));
+ return function filterByFiles(file) {
+ if (file.type === SYMBOLIC_LINK) return false;
+
+ const path = file.path.replace(/\\/g, '/');
+ return _.includes(filterFiles, path);
+ };
+}
+
+/**
+ * Creates a filter function to be consumed by extractFiles that filters by
+ * an array of root paths
+ * @param {array} paths - an array of root paths from the archive. All files and
+ * folders will be extracted recursively using these paths as roots.
+ */
+function extractFilterFromPaths(paths) {
+ return function filterByRootPath(file) {
+ if (file.type === SYMBOLIC_LINK) return false;
+
+ return paths.some(path => {
+ const regex = new RegExp(`${path}($|/)`, 'i');
+ return file.parent.match(regex);
+ });
+ };
+}
+
+/**
+ * Creates a filter function to be consumed by extractFiles
+ * @param {object} filter - an object with either a files or paths property.
+ */
+function extractFilter(filter) {
+ if (filter.files) return extractFilterFromFiles(filter.files);
+ if (filter.paths) return extractFilterFromPaths(filter.paths);
+ return _.noop;
+}
+
+/**
+ * Extracts files from a zip archive to a file path using a filter function
+ * @param {string} zipPath - file path to a zip archive
+ * @param {string} targetPath - directory path to where the files should
+ * extracted
+ * @param {integer} strip - Number of nested directories within the archive
+ * that should be ignored when determining the target path of an archived
+ * file.
+ * @param {function} filter - A function that accepts a single parameter 'file'
+ * and returns true if the file should be extracted from the archive
+ */
+export async function extractFiles(zipPath, targetPath, strip, filter) {
+ await new Promise((resolve, reject) => {
+ const unzipper = new DecompressZip(zipPath);
+
+ unzipper.on('error', reject);
+
+ unzipper.extract({
+ path: targetPath,
+ strip: strip,
+ filter: extractFilter(filter)
+ });
+
+ unzipper.on('extract', resolve);
+ });
+}
+
+/**
+ * Returns all files within an archive
+ * @param {string} zipPath - file path to a zip archive
+ * @returns {array} all files within an archive with their relative paths
+ */
+export async function listFiles(zipPath) {
+ return await new Promise((resolve, reject) => {
+ const unzipper = new DecompressZip(zipPath);
+
+ unzipper.on('error', reject);
+
+ unzipper.on('list', (files) => {
+ files = files.map((file) => file.replace(/\\/g, '/'));
+ resolve(files);
+ });
+
+ unzipper.list();
+ });
+}
diff --git a/src/cli_plugin/lib/__tests__/logger.js b/src/cli_plugin/lib/__tests__/logger.js
new file mode 100644
index 0000000000000..26bcca9ea4886
--- /dev/null
+++ b/src/cli_plugin/lib/__tests__/logger.js
@@ -0,0 +1,126 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import Logger from '../logger';
+
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ describe('logger', function () {
+ let logger;
+
+ describe('logger.log', function () {
+
+ beforeEach(function () {
+ sinon.spy(process.stdout, 'write');
+ });
+
+ afterEach(function () {
+ process.stdout.write.restore();
+ });
+
+ it('should log messages to the console and append a new line', function () {
+ logger = new Logger({ silent: false, quiet: false });
+ const message = 'this is my message';
+
+ logger.log(message);
+
+ const callCount = process.stdout.write.callCount;
+ expect(process.stdout.write.getCall(callCount - 2).args[0]).to.be(message);
+ expect(process.stdout.write.getCall(callCount - 1).args[0]).to.be('\n');
+ });
+
+      it('should log messages to the console and not append a new line', function () {
+ logger = new Logger({ silent: false, quiet: false });
+ for (let i = 0; i < 10; i++) {
+ logger.log('.', true);
+ }
+ logger.log('Done!');
+
+ expect(process.stdout.write.callCount).to.be(13);
+ expect(process.stdout.write.getCall(0).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(1).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(2).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(3).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(4).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(5).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(6).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(7).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(8).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(9).args[0]).to.be('.');
+ expect(process.stdout.write.getCall(10).args[0]).to.be('\n');
+ expect(process.stdout.write.getCall(11).args[0]).to.be('Done!');
+ expect(process.stdout.write.getCall(12).args[0]).to.be('\n');
+ });
+
+ it('should not log any messages when quiet is set', function () {
+ logger = new Logger({ silent: false, quiet: true });
+
+ const message = 'this is my message';
+ logger.log(message);
+
+ for (let i = 0; i < 10; i++) {
+ logger.log('.', true);
+ }
+ logger.log('Done!');
+
+ expect(process.stdout.write.callCount).to.be(0);
+ });
+
+ it('should not log any messages when silent is set', function () {
+ logger = new Logger({ silent: true, quiet: false });
+
+ const message = 'this is my message';
+ logger.log(message);
+
+ for (let i = 0; i < 10; i++) {
+ logger.log('.', true);
+ }
+ logger.log('Done!');
+
+ expect(process.stdout.write.callCount).to.be(0);
+ });
+
+ });
+
+ describe('logger.error', function () {
+
+ beforeEach(function () {
+ sinon.spy(process.stderr, 'write');
+ });
+
+ afterEach(function () {
+ process.stderr.write.restore();
+ });
+
+ it('should log error messages to the console and append a new line', function () {
+ logger = new Logger({ silent: false, quiet: false });
+ const message = 'this is my error';
+
+ logger.error(message);
+ expect(process.stderr.write.calledWith(message + '\n')).to.be(true);
+ });
+
+ it('should log error messages to the console when quiet is set', function () {
+ logger = new Logger({ silent: false, quiet: true });
+ const message = 'this is my error';
+
+ logger.error(message);
+ expect(process.stderr.write.calledWith(message + '\n')).to.be(true);
+ });
+
+ it('should not log any error messages when silent is set', function () {
+ logger = new Logger({ silent: true, quiet: false });
+ const message = 'this is my error';
+
+ logger.error(message);
+ expect(process.stderr.write.callCount).to.be(0);
+ });
+
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/lib/errors.js b/src/cli_plugin/lib/errors.js
new file mode 100644
index 0000000000000..9bcdac145ca55
--- /dev/null
+++ b/src/cli_plugin/lib/errors.js
@@ -0,0 +1 @@
+export class UnsupportedProtocolError extends Error {};
diff --git a/src/cli_plugin/lib/logger.js b/src/cli_plugin/lib/logger.js
new file mode 100644
index 0000000000000..16bc15f33e026
--- /dev/null
+++ b/src/cli_plugin/lib/logger.js
@@ -0,0 +1,46 @@
+/**
+ * Logs messages and errors
+ */
+export default class Logger {
+
+ constructor(settings) {
+ this.previousLineEnded = true;
+ this.silent = !!settings.silent;
+ this.quiet = !!settings.quiet;
+ }
+
+ log(data, sameLine) {
+ if (this.silent || this.quiet) return;
+
+ if (!sameLine && !this.previousLineEnded) {
+ process.stdout.write('\n');
+ }
+
+ //if data is a stream, pipe it.
+ if (data.readable) {
+ data.pipe(process.stdout);
+ return;
+ }
+
+ process.stdout.write(data);
+ if (!sameLine) process.stdout.write('\n');
+ this.previousLineEnded = !sameLine;
+ }
+
+ error(data) {
+ if (this.silent) return;
+
+ if (!this.previousLineEnded) {
+ process.stderr.write('\n');
+ }
+
+ //if data is a stream, pipe it.
+ if (data.readable) {
+ data.pipe(process.stderr);
+ return;
+ }
+ process.stderr.write(`${data}\n`);
+ this.previousLineEnded = true;
+ };
+
+}
diff --git a/src/cli_plugin/list/__tests__/list.js b/src/cli_plugin/list/__tests__/list.js
new file mode 100644
index 0000000000000..58ba9dfdc7985
--- /dev/null
+++ b/src/cli_plugin/list/__tests__/list.js
@@ -0,0 +1,102 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import rimraf from 'rimraf';
+import mkdirp from 'mkdirp';
+import Logger from '../../lib/logger';
+import list from '../list';
+import { join } from 'path';
+import { writeFileSync, appendFileSync } from 'fs';
+
+
+function createPlugin(name, version, pluginBaseDir) {
+ const pluginDir = join(pluginBaseDir, name);
+ mkdirp.sync(pluginDir);
+ appendFileSync(join(pluginDir, 'package.json'), '{"version": "' + version + '"}');
+}
+
+describe('kibana cli', function () {
+
+ describe('plugin lister', function () {
+
+ const pluginDir = join(__dirname, '.test.data');
+ let logger;
+
+ const settings = {
+ pluginDir: pluginDir
+ };
+
+ beforeEach(function () {
+ logger = new Logger(settings);
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ rimraf.sync(pluginDir);
+ mkdirp.sync(pluginDir);
+ });
+
+ afterEach(function () {
+ logger.log.restore();
+ logger.error.restore();
+ rimraf.sync(pluginDir);
+ });
+
+ it('list all of the folders in the plugin folder', function () {
+ createPlugin('plugin1', '5.0.0-alpha2', pluginDir);
+ createPlugin('plugin2', '3.2.1', pluginDir);
+ createPlugin('plugin3', '1.2.3', pluginDir);
+
+ list(settings, logger);
+
+ expect(logger.log.calledWith('plugin1@5.0.0-alpha2')).to.be(true);
+ expect(logger.log.calledWith('plugin2@3.2.1')).to.be(true);
+ expect(logger.log.calledWith('plugin3@1.2.3')).to.be(true);
+ });
+
+ it('ignore folders that start with a period', function () {
+ createPlugin('.foo', '1.0.0', pluginDir);
+ createPlugin('plugin1', '5.0.0-alpha2', pluginDir);
+ createPlugin('plugin2', '3.2.1', pluginDir);
+ createPlugin('plugin3', '1.2.3', pluginDir);
+ createPlugin('.bar', '1.0.0', pluginDir);
+
+ list(settings, logger);
+
+ expect(logger.log.calledWith('.foo@1.0.0')).to.be(false);
+ expect(logger.log.calledWith('.bar@1.0.0')).to.be(false);
+ });
+
+ it('list should only list folders', function () {
+ createPlugin('plugin1', '1.0.0', pluginDir);
+ createPlugin('plugin2', '1.0.0', pluginDir);
+ createPlugin('plugin3', '1.0.0', pluginDir);
+ writeFileSync(join(pluginDir, 'plugin4'), 'This is a file, and not a folder.');
+
+ list(settings, logger);
+
+ expect(logger.log.calledWith('plugin1@1.0.0')).to.be(true);
+ expect(logger.log.calledWith('plugin2@1.0.0')).to.be(true);
+ expect(logger.log.calledWith('plugin3@1.0.0')).to.be(true);
+ });
+
+ it('list should throw an exception if a plugin does not have a package.json', function () {
+ createPlugin('plugin1', '1.0.0', pluginDir);
+ mkdirp.sync(join(pluginDir, 'empty-plugin'));
+
+ expect(function () {
+ list(settings, logger);
+ }).to.throwError('Unable to read package.json file for plugin empty-plugin');
+ });
+
+    it('list should throw an exception if a plugin has an empty package.json', function () {
+ createPlugin('plugin1', '1.0.0', pluginDir);
+ const invalidPluginDir = join(pluginDir, 'invalid-plugin');
+ mkdirp.sync(invalidPluginDir);
+ appendFileSync(join(invalidPluginDir, 'package.json'), '');
+
+ expect(function () {
+ list(settings, logger);
+ }).to.throwError('Unable to read package.json file for plugin invalid-plugin');
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/list/__tests__/settings.js b/src/cli_plugin/list/__tests__/settings.js
new file mode 100644
index 0000000000000..8e08e5b17ad5e
--- /dev/null
+++ b/src/cli_plugin/list/__tests__/settings.js
@@ -0,0 +1,44 @@
+import path from 'path';
+import expect from 'expect.js';
+import fromRoot from '../../../utils/from_root';
+import { resolve } from 'path';
+import { parseMilliseconds, parse } from '../settings';
+
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ describe('command line option parsing', function () {
+
+ describe('parse function', function () {
+
+ let command;
+ const options = {};
+ beforeEach(function () {
+ command = { pluginDir: fromRoot('installedPlugins') };
+ });
+
+ describe('pluginDir option', function () {
+
+ it('should default to installedPlugins', function () {
+ const settings = parse(command, options);
+
+ expect(settings.pluginDir).to.be(fromRoot('installedPlugins'));
+ });
+
+ it('should set settings.config property', function () {
+ command.pluginDir = 'foo bar baz';
+ const settings = parse(command, options);
+
+ expect(settings.pluginDir).to.be('foo bar baz');
+ });
+
+ });
+
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/list/index.js b/src/cli_plugin/list/index.js
new file mode 100644
index 0000000000000..e7d8320c65e35
--- /dev/null
+++ b/src/cli_plugin/list/index.js
@@ -0,0 +1,30 @@
+import { fromRoot } from '../../utils';
+import list from './list';
+import Logger from '../lib/logger';
+import { parse } from './settings';
+
+function processCommand(command, options) {
+ let settings;
+ try {
+ settings = parse(command, options);
+ } catch (ex) {
+ //The logger has not yet been initialized.
+ console.error(ex.message);
+ process.exit(64); // eslint-disable-line no-process-exit
+ }
+
+ const logger = new Logger(settings);
+ list(settings, logger);
+}
+
+export default function pluginList(program) {
+ program
+ .command('list')
+ .option(
+ '-d, --plugin-dir ',
+ 'path to the directory where plugins are stored',
+ fromRoot('installedPlugins')
+ )
+ .description('list installed plugins')
+ .action(processCommand);
+};
diff --git a/src/cli_plugin/list/list.js b/src/cli_plugin/list/list.js
new file mode 100644
index 0000000000000..80146ed128c10
--- /dev/null
+++ b/src/cli_plugin/list/list.js
@@ -0,0 +1,20 @@
+import { statSync, readdirSync, readFileSync } from 'fs';
+import { join } from 'path';
+
+export default function list(settings, logger) {
+ readdirSync(settings.pluginDir)
+ .forEach((filename) => {
+ const stat = statSync(join(settings.pluginDir, filename));
+
+ if (stat.isDirectory() && filename[0] !== '.') {
+ try {
+ const packagePath = join(settings.pluginDir, filename, 'package.json');
+ const { version } = JSON.parse(readFileSync(packagePath, 'utf8'));
+ logger.log(filename + '@' + version);
+ } catch (e) {
+ throw new Error('Unable to read package.json file for plugin ' + filename);
+ }
+ }
+ });
+ logger.log(''); //intentional blank line for aesthetics
+}
diff --git a/src/cli_plugin/list/settings.js b/src/cli_plugin/list/settings.js
new file mode 100644
index 0000000000000..f372bfc0d35eb
--- /dev/null
+++ b/src/cli_plugin/list/settings.js
@@ -0,0 +1,9 @@
+import { resolve } from 'path';
+
+export function parse(command, options) {
+ const settings = {
+ pluginDir: command.pluginDir || ''
+ };
+
+ return settings;
+};
diff --git a/src/cli_plugin/remove/__tests__/remove.js b/src/cli_plugin/remove/__tests__/remove.js
new file mode 100644
index 0000000000000..567f67c33c9ac
--- /dev/null
+++ b/src/cli_plugin/remove/__tests__/remove.js
@@ -0,0 +1,68 @@
+import expect from 'expect.js';
+import sinon from 'sinon';
+import glob from 'glob-all';
+import rimraf from 'rimraf';
+import mkdirp from 'mkdirp';
+import Logger from '../../lib/logger';
+import remove from '../remove';
+import { join } from 'path';
+import { writeFileSync } from 'fs';
+
+describe('kibana cli', function () {
+
+ describe('plugin remover', function () {
+
+ const pluginDir = join(__dirname, '.test.data');
+ let processExitStub;
+ let logger;
+
+ const settings = { pluginDir };
+
+ beforeEach(function () {
+ processExitStub = sinon.stub(process, 'exit');
+ logger = new Logger(settings);
+ sinon.stub(logger, 'log');
+ sinon.stub(logger, 'error');
+ rimraf.sync(pluginDir);
+ mkdirp.sync(pluginDir);
+ });
+
+ afterEach(function () {
+ processExitStub.restore();
+ logger.log.restore();
+ logger.error.restore();
+ rimraf.sync(pluginDir);
+ });
+
+ it('throw an error if the plugin is not installed.', function () {
+ settings.pluginPath = join(pluginDir, 'foo');
+ settings.plugin = 'foo';
+
+ remove(settings, logger);
+ expect(logger.error.firstCall.args[0]).to.match(/not installed/);
+ expect(process.exit.called).to.be(true);
+ });
+
+ it('throw an error if the specified plugin is not a folder.', function () {
+ writeFileSync(join(pluginDir, 'foo'), 'This is a file, and not a folder.');
+
+ remove(settings, logger);
+ expect(logger.error.firstCall.args[0]).to.match(/not a plugin/);
+ expect(process.exit.called).to.be(true);
+ });
+
+ it('delete the specified folder.', function () {
+ settings.pluginPath = join(pluginDir, 'foo');
+ mkdirp.sync(join(pluginDir, 'foo'));
+ mkdirp.sync(join(pluginDir, 'bar'));
+
+ remove(settings, logger);
+
+ const files = glob.sync('**/*', { cwd: pluginDir });
+ const expected = ['bar'];
+ expect(files.sort()).to.eql(expected.sort());
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/remove/__tests__/settings.js b/src/cli_plugin/remove/__tests__/settings.js
new file mode 100644
index 0000000000000..b87600d959680
--- /dev/null
+++ b/src/cli_plugin/remove/__tests__/settings.js
@@ -0,0 +1,106 @@
+import path from 'path';
+import expect from 'expect.js';
+import fromRoot from '../../../utils/from_root';
+import { resolve } from 'path';
+import { parseMilliseconds, parse } from '../settings';
+
+describe('kibana cli', function () {
+
+ describe('plugin installer', function () {
+
+ describe('command line option parsing', function () {
+
+ describe('parse function', function () {
+
+ const command = 'plugin name';
+ let options = {};
+ const kbnPackage = { version: 1234 };
+ beforeEach(function () {
+ options = { pluginDir: fromRoot('installedPlugins') };
+ });
+
+ describe('quiet option', function () {
+
+ it('should default to false', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.quiet).to.be(false);
+ });
+
+ it('should set settings.quiet property to true', function () {
+ options.quiet = true;
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.quiet).to.be(true);
+ });
+
+ });
+
+ describe('silent option', function () {
+
+ it('should default to false', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.silent).to.be(false);
+ });
+
+ it('should set settings.silent property to true', function () {
+ options.silent = true;
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.silent).to.be(true);
+ });
+
+ });
+
+ describe('config option', function () {
+
+        it('should default to an empty string', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.config).to.be('');
+ });
+
+ it('should set settings.config property', function () {
+ options.config = 'foo bar baz';
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.config).to.be('foo bar baz');
+ });
+
+ });
+
+ describe('pluginDir option', function () {
+
+ it('should default to installedPlugins', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.pluginDir).to.be(fromRoot('installedPlugins'));
+ });
+
+ it('should set settings.config property', function () {
+ options.pluginDir = 'foo bar baz';
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.pluginDir).to.be('foo bar baz');
+ });
+
+ });
+
+ describe('command value', function () {
+
+ it('should set settings.plugin property', function () {
+ const settings = parse(command, options, kbnPackage);
+
+ expect(settings.plugin).to.be(command);
+ });
+
+ });
+
+ });
+
+ });
+
+ });
+
+});
diff --git a/src/cli_plugin/remove/index.js b/src/cli_plugin/remove/index.js
new file mode 100644
index 0000000000000..b763e6882fc6d
--- /dev/null
+++ b/src/cli_plugin/remove/index.js
@@ -0,0 +1,39 @@
+import { fromRoot } from '../../utils';
+import remove from './remove';
+import Logger from '../lib/logger';
+import { parse } from './settings';
+
+function processCommand(command, options) {
+ let settings;
+ try {
+ settings = parse(command, options);
+ } catch (ex) {
+ //The logger has not yet been initialized.
+ console.error(ex.message);
+ process.exit(64); // eslint-disable-line no-process-exit
+ }
+
+ const logger = new Logger(settings);
+ remove(settings, logger);
+}
+
+export default function pluginRemove(program) {
+ program
+ .command('remove ')
+ .option('-q, --quiet', 'disable all process messaging except errors')
+ .option('-s, --silent', 'disable all process messaging')
+ .option(
+ '-c, --config ',
+ 'path to the config file',
+ fromRoot('config/kibana.yml')
+ )
+ .option(
+ '-d, --plugin-dir ',
+ 'path to the directory where plugins are stored',
+ fromRoot('installedPlugins')
+ )
+ .description('remove a plugin',
+`common examples:
+ remove x-pack`)
+ .action(processCommand);
+};
diff --git a/src/cli_plugin/remove/remove.js b/src/cli_plugin/remove/remove.js
new file mode 100644
index 0000000000000..4fa40076129e9
--- /dev/null
+++ b/src/cli_plugin/remove/remove.js
@@ -0,0 +1,23 @@
+import { statSync } from 'fs';
+import rimraf from 'rimraf';
+
+export default function remove(settings, logger) {
+  try {
+    let stat;
+    try {
+      stat = statSync(settings.pluginPath); // throws if the path does not exist
+    } catch (e) {
+      throw new Error(`Plugin [${settings.plugin}] is not installed`);
+    }
+
+    if (!stat.isDirectory()) {
+      throw new Error(`[${settings.plugin}] is not a plugin`);
+    }
+
+    logger.log(`Removing ${settings.plugin}...`);
+    rimraf.sync(settings.pluginPath); // recursive delete of the plugin directory
+  } catch (err) {
+    logger.error(`Unable to remove plugin because of error: "${err.message}"`);
+    process.exit(74); // eslint-disable-line no-process-exit
+  }
+}
diff --git a/src/cli_plugin/remove/settings.js b/src/cli_plugin/remove/settings.js
new file mode 100644
index 0000000000000..7e7ed37d2e5a9
--- /dev/null
+++ b/src/cli_plugin/remove/settings.js
@@ -0,0 +1,15 @@
+import { resolve } from 'path';
+
+export function parse(command, options) {
+  const settings = {
+    quiet: options.quiet || false,
+    silent: options.silent || false,
+    config: options.config || '',
+    pluginDir: options.pluginDir || '',
+    plugin: command
+  };
+  // Absolute path of the plugin directory that `remove` will delete.
+  settings.pluginPath = resolve(settings.pluginDir, settings.plugin);
+
+  return settings;
+}
diff --git a/src/fixtures/agg_resp/geohash_grid.js b/src/fixtures/agg_resp/geohash_grid.js
index df9b6513de6b9..33a446ff360b2 100644
--- a/src/fixtures/agg_resp/geohash_grid.js
+++ b/src/fixtures/agg_resp/geohash_grid.js
@@ -1,83 +1,81 @@
-define(function (require) {
- return function GeoHashGridAggResponseFixture() {
+import _ from 'lodash';
+export default function GeoHashGridAggResponseFixture() {
- var _ = require('lodash');
- // for vis:
- //
- // vis = new Vis(indexPattern, {
- // type: 'tile_map',
- // aggs:[
- // { schema: 'metric', type: 'avg', params: { field: 'bytes' } },
- // { schema: 'split', type: 'terms', params: { field: '@tags', size: 10 } },
- // { schema: 'segment', type: 'geohash_grid', params: { field: 'geo.coordinates', precision: 3 } }
- // ],
- // params: {
- // isDesaturated: true,
- // mapType: 'Scaled%20Circle%20Markers'
- // },
- // });
+ // for vis:
+ //
+ // vis = new Vis(indexPattern, {
+ // type: 'tile_map',
+ // aggs:[
+ // { schema: 'metric', type: 'avg', params: { field: 'bytes' } },
+ // { schema: 'split', type: 'terms', params: { field: '@tags', size: 10 } },
+ // { schema: 'segment', type: 'geohash_grid', params: { field: 'geo.coordinates', precision: 3 } }
+ // ],
+ // params: {
+ // isDesaturated: true,
+ // mapType: 'Scaled%20Circle%20Markers'
+ // },
+ // });
- var geoHashCharts = _.union(
- _.range(48, 57), // 0-9
- _.range(65, 90), // A-Z
- _.range(97, 122) // a-z
- );
+ let geoHashCharts = _.union(
+ _.range(48, 57), // 0-9
+ _.range(65, 90), // A-Z
+ _.range(97, 122) // a-z
+ );
- var totalDocCount = 0;
+ let totalDocCount = 0;
- var tags = _.times(_.random(4, 20), function (i) {
- // random number of tags
- var docCount = 0;
- var buckets = _.times(_.random(40, 200), function () {
- return _.sample(geoHashCharts, 3).join('');
- })
- .sort()
- .map(function (geoHash) {
- var count = _.random(1, 5000);
+ let tags = _.times(_.random(4, 20), function (i) {
+ // random number of tags
+ let docCount = 0;
+ let buckets = _.times(_.random(40, 200), function () {
+ return _.sample(geoHashCharts, 3).join('');
+ })
+ .sort()
+ .map(function (geoHash) {
+ let count = _.random(1, 5000);
- totalDocCount += count;
- docCount += count;
-
- return {
- key: geoHash,
- doc_count: count,
- 1: {
- value: 2048 + i
- }
- };
- });
+ totalDocCount += count;
+ docCount += count;
return {
- key: 'tag ' + (i + 1),
- doc_count: docCount,
- 3: {
- buckets: buckets
- },
+ key: geoHash,
+ doc_count: count,
1: {
- value: 1000 + i
+ value: 2048 + i
}
};
});
return {
- took: 3,
- timed_out: false,
- _shards: {
- total: 4,
- successful: 4,
- failed: 0
- },
- hits: {
- total: 298,
- max_score: 0.0,
- hits: []
+ key: 'tag ' + (i + 1),
+ doc_count: docCount,
+ 3: {
+ buckets: buckets
},
- aggregations: {
- 2: {
- buckets: tags
- }
+ 1: {
+ value: 1000 + i
}
};
+ });
+
+ return {
+ took: 3,
+ timed_out: false,
+ _shards: {
+ total: 4,
+ successful: 4,
+ failed: 0
+ },
+ hits: {
+ total: 298,
+ max_score: 0.0,
+ hits: []
+ },
+ aggregations: {
+ 2: {
+ buckets: tags
+ }
+ }
};
-});
+};
diff --git a/src/fixtures/agg_resp/range.js b/src/fixtures/agg_resp/range.js
index c571d1e1d2ae1..f857951c27051 100644
--- a/src/fixtures/agg_resp/range.js
+++ b/src/fixtures/agg_resp/range.js
@@ -1,4 +1,4 @@
-module.exports = {
+export default {
"took": 35,
"timed_out": false,
"_shards": {
diff --git a/src/fixtures/fake_chart_events.js b/src/fixtures/fake_chart_events.js
index f011fbade44f9..383a61c900815 100644
--- a/src/fixtures/fake_chart_events.js
+++ b/src/fixtures/fake_chart_events.js
@@ -1,22 +1,20 @@
-define(function (require) {
- var results = {};
+let results = {};
- results.timeSeries = {
- data: {
- ordered: {
- date: true,
- interval: 600000,
- max: 1414437217559,
- min: 1414394017559
- }
- },
- label: 'apache',
- value: 44,
- point: {
- label: 'apache',
- x: 1414400400000,
- y: 44,
- y0: 0
+results.timeSeries = {
+ data: {
+ ordered: {
+ date: true,
+ interval: 600000,
+ max: 1414437217559,
+ min: 1414394017559
}
- };
-});
+ },
+ label: 'apache',
+ value: 44,
+ point: {
+ label: 'apache',
+ x: 1414400400000,
+ y: 44,
+ y0: 0
+ }
+};
diff --git a/src/fixtures/fake_hierarchical_data.js b/src/fixtures/fake_hierarchical_data.js
index 36797d0773e50..2b1096cc9c271 100644
--- a/src/fixtures/fake_hierarchical_data.js
+++ b/src/fixtures/fake_hierarchical_data.js
@@ -1,228 +1,226 @@
-define(function (require) {
- var data = { };
+let data = { };
- data.metricOnly = {
- hits: { total: 1000, hits: [], max_score: 0 },
- aggregations: {
- agg_1: { value: 412032 },
- }
- };
+data.metricOnly = {
+ hits: { total: 1000, hits: [], max_score: 0 },
+ aggregations: {
+ agg_1: { value: 412032 },
+ }
+};
- data.threeTermBuckets = {
- hits: { total: 1000, hits: [], max_score: 0 },
- aggregations: {
- agg_2: {
- buckets: [
- {
- key: 'png',
- doc_count: 50,
- agg_1: { value: 412032 },
- agg_3: {
- buckets: [
- {
- key: 'IT',
- doc_count: 10,
- agg_1: { value: 9299 },
- agg_4: {
- buckets: [
- { key: 'win', doc_count: 4, agg_1: { value: 0 } },
- { key: 'mac', doc_count: 6, agg_1: { value: 9299 } }
- ]
- }
- },
- {
- key: 'US',
- doc_count: 20,
- agg_1: { value: 8293 },
- agg_4: {
- buckets: [
- { key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
- { key: 'mac', doc_count: 8, agg_1: { value: 3029 } }
- ]
- }
+data.threeTermBuckets = {
+ hits: { total: 1000, hits: [], max_score: 0 },
+ aggregations: {
+ agg_2: {
+ buckets: [
+ {
+ key: 'png',
+ doc_count: 50,
+ agg_1: { value: 412032 },
+ agg_3: {
+ buckets: [
+ {
+ key: 'IT',
+ doc_count: 10,
+ agg_1: { value: 9299 },
+ agg_4: {
+ buckets: [
+ { key: 'win', doc_count: 4, agg_1: { value: 0 } },
+ { key: 'mac', doc_count: 6, agg_1: { value: 9299 } }
+ ]
+ }
+ },
+ {
+ key: 'US',
+ doc_count: 20,
+ agg_1: { value: 8293 },
+ agg_4: {
+ buckets: [
+ { key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
+ { key: 'mac', doc_count: 8, agg_1: { value: 3029 } }
+ ]
}
- ]
- }
- },
- {
- key: 'css',
- doc_count: 20,
- agg_1: { value: 412032 },
- agg_3: {
- buckets: [
- {
- key: 'MX',
- doc_count: 7,
- agg_1: { value: 9299 },
- agg_4: {
- buckets: [
- { key: 'win', doc_count: 3, agg_1: { value: 4992 } },
- { key: 'mac', doc_count: 4, agg_1: { value: 5892 } }
- ]
- }
- },
- {
- key: 'US',
- doc_count: 13,
- agg_1: { value: 8293 },
- agg_4: {
- buckets: [
- { key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
- { key: 'mac', doc_count: 1, agg_1: { value: 3029 } }
- ]
- }
+ }
+ ]
+ }
+ },
+ {
+ key: 'css',
+ doc_count: 20,
+ agg_1: { value: 412032 },
+ agg_3: {
+ buckets: [
+ {
+ key: 'MX',
+ doc_count: 7,
+ agg_1: { value: 9299 },
+ agg_4: {
+ buckets: [
+ { key: 'win', doc_count: 3, agg_1: { value: 4992 } },
+ { key: 'mac', doc_count: 4, agg_1: { value: 5892 } }
+ ]
}
- ]
- }
- },
- {
- key: 'html',
- doc_count: 90,
- agg_1: { value: 412032 },
- agg_3: {
- buckets: [
- {
- key: 'CN',
- doc_count: 85,
- agg_1: { value: 9299 },
- agg_4: {
- buckets: [
- { key: 'win', doc_count: 46, agg_1: { value: 4992 } },
- { key: 'mac', doc_count: 39, agg_1: { value: 5892 } }
- ]
- }
- },
- {
- key: 'FR',
- doc_count: 15,
- agg_1: { value: 8293 },
- agg_4: {
- buckets: [
- { key: 'win', doc_count: 3, agg_1: { value: 3992 } },
- { key: 'mac', doc_count: 12, agg_1: { value: 3029 } }
- ]
- }
+ },
+ {
+ key: 'US',
+ doc_count: 13,
+ agg_1: { value: 8293 },
+ agg_4: {
+ buckets: [
+ { key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
+ { key: 'mac', doc_count: 1, agg_1: { value: 3029 } }
+ ]
}
- ]
- }
+ }
+ ]
}
- ]
- }
+ },
+ {
+ key: 'html',
+ doc_count: 90,
+ agg_1: { value: 412032 },
+ agg_3: {
+ buckets: [
+ {
+ key: 'CN',
+ doc_count: 85,
+ agg_1: { value: 9299 },
+ agg_4: {
+ buckets: [
+ { key: 'win', doc_count: 46, agg_1: { value: 4992 } },
+ { key: 'mac', doc_count: 39, agg_1: { value: 5892 } }
+ ]
+ }
+ },
+ {
+ key: 'FR',
+ doc_count: 15,
+ agg_1: { value: 8293 },
+ agg_4: {
+ buckets: [
+ { key: 'win', doc_count: 3, agg_1: { value: 3992 } },
+ { key: 'mac', doc_count: 12, agg_1: { value: 3029 } }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ ]
}
- };
+ }
+};
- data.oneRangeBucket = {
- 'took': 35,
- 'timed_out': false,
- '_shards': {
- 'total': 1,
- 'successful': 1,
- 'failed': 0
- },
- 'hits': {
- 'total': 6039,
- 'max_score': 0,
- 'hits': []
- },
- 'aggregations': {
- 'agg_2': {
- 'buckets': {
- '0.0-1000.0': {
- 'from': 0,
- 'from_as_string': '0.0',
- 'to': 1000,
- 'to_as_string': '1000.0',
- 'doc_count': 606
- },
- '1000.0-2000.0': {
- 'from': 1000,
- 'from_as_string': '1000.0',
- 'to': 2000,
- 'to_as_string': '2000.0',
- 'doc_count': 298
- }
+data.oneRangeBucket = {
+ 'took': 35,
+ 'timed_out': false,
+ '_shards': {
+ 'total': 1,
+ 'successful': 1,
+ 'failed': 0
+ },
+ 'hits': {
+ 'total': 6039,
+ 'max_score': 0,
+ 'hits': []
+ },
+ 'aggregations': {
+ 'agg_2': {
+ 'buckets': {
+ '0.0-1000.0': {
+ 'from': 0,
+ 'from_as_string': '0.0',
+ 'to': 1000,
+ 'to_as_string': '1000.0',
+ 'doc_count': 606
+ },
+ '1000.0-2000.0': {
+ 'from': 1000,
+ 'from_as_string': '1000.0',
+ 'to': 2000,
+ 'to_as_string': '2000.0',
+ 'doc_count': 298
}
}
}
- };
+ }
+};
- data.oneFilterBucket = {
- 'took': 11,
- 'timed_out': false,
- '_shards': {
- 'total': 1,
- 'successful': 1,
- 'failed': 0
- },
- 'hits': {
- 'total': 6005,
- 'max_score': 0,
- 'hits': []
- },
- 'aggregations': {
- 'agg_2': {
- 'buckets': {
- '_type:apache': {
- 'doc_count': 4844
- },
- '_type:nginx': {
- 'doc_count': 1161
- }
+data.oneFilterBucket = {
+ 'took': 11,
+ 'timed_out': false,
+ '_shards': {
+ 'total': 1,
+ 'successful': 1,
+ 'failed': 0
+ },
+ 'hits': {
+ 'total': 6005,
+ 'max_score': 0,
+ 'hits': []
+ },
+ 'aggregations': {
+ 'agg_2': {
+ 'buckets': {
+ '_type:apache': {
+ 'doc_count': 4844
+ },
+ '_type:nginx': {
+ 'doc_count': 1161
}
}
}
- };
+ }
+};
- data.oneHistogramBucket = {
- 'took': 37,
- 'timed_out': false,
- '_shards': {
- 'total': 6,
- 'successful': 6,
- 'failed': 0
- },
- 'hits': {
- 'total': 49208,
- 'max_score': 0,
- 'hits': []
- },
- 'aggregations': {
- 'agg_2': {
- 'buckets': [
- {
- 'key_as_string': '2014-09-28T00:00:00.000Z',
- 'key': 1411862400000,
- 'doc_count': 8247
- },
- {
- 'key_as_string': '2014-09-29T00:00:00.000Z',
- 'key': 1411948800000,
- 'doc_count': 8184
- },
- {
- 'key_as_string': '2014-09-30T00:00:00.000Z',
- 'key': 1412035200000,
- 'doc_count': 8269
- },
- {
- 'key_as_string': '2014-10-01T00:00:00.000Z',
- 'key': 1412121600000,
- 'doc_count': 8141
- },
- {
- 'key_as_string': '2014-10-02T00:00:00.000Z',
- 'key': 1412208000000,
- 'doc_count': 8148
- },
- {
- 'key_as_string': '2014-10-03T00:00:00.000Z',
- 'key': 1412294400000,
- 'doc_count': 8219
- }
- ]
- }
+data.oneHistogramBucket = {
+ 'took': 37,
+ 'timed_out': false,
+ '_shards': {
+ 'total': 6,
+ 'successful': 6,
+ 'failed': 0
+ },
+ 'hits': {
+ 'total': 49208,
+ 'max_score': 0,
+ 'hits': []
+ },
+ 'aggregations': {
+ 'agg_2': {
+ 'buckets': [
+ {
+ 'key_as_string': '2014-09-28T00:00:00.000Z',
+ 'key': 1411862400000,
+ 'doc_count': 8247
+ },
+ {
+ 'key_as_string': '2014-09-29T00:00:00.000Z',
+ 'key': 1411948800000,
+ 'doc_count': 8184
+ },
+ {
+ 'key_as_string': '2014-09-30T00:00:00.000Z',
+ 'key': 1412035200000,
+ 'doc_count': 8269
+ },
+ {
+ 'key_as_string': '2014-10-01T00:00:00.000Z',
+ 'key': 1412121600000,
+ 'doc_count': 8141
+ },
+ {
+ 'key_as_string': '2014-10-02T00:00:00.000Z',
+ 'key': 1412208000000,
+ 'doc_count': 8148
+ },
+ {
+ 'key_as_string': '2014-10-03T00:00:00.000Z',
+ 'key': 1412294400000,
+ 'doc_count': 8219
+ }
+ ]
}
- };
+ }
+};
- return data;
-});
+export default data;
diff --git a/src/fixtures/fake_row.js b/src/fixtures/fake_row.js
index 855d6bd7e50af..032fe0f1a9235 100644
--- a/src/fixtures/fake_row.js
+++ b/src/fixtures/fake_row.js
@@ -1,22 +1,20 @@
-define(function (require) {
- var _ = require('lodash');
- var longString = Array(200).join('_');
+import _ from 'lodash';
+let longString = Array(200).join('_');
- return function (id, mapping) {
- function fakeVals(type) {
- return _.mapValues(mapping, function (f, c) {
- return c + '_' + type + '_' + id + longString;
- });
- }
+export default function (id, mapping) {
+ function fakeVals(type) {
+ return _.mapValues(mapping, function (f, c) {
+ return c + '_' + type + '_' + id + longString;
+ });
+ }
- return {
- _id: id,
- _index: 'test',
- _source: fakeVals('original'),
- sort: [id],
- $$_formatted: fakeVals('formatted'),
- $$_partialFormatted: fakeVals('formatted'),
- $$_flattened: fakeVals('_flattened')
- };
+ return {
+ _id: id,
+ _index: 'test',
+ _source: fakeVals('original'),
+ sort: [id],
+ $$_formatted: fakeVals('formatted'),
+ $$_partialFormatted: fakeVals('formatted'),
+ $$_flattened: fakeVals('_flattened')
};
-});
+};
diff --git a/src/fixtures/field_mapping.js b/src/fixtures/field_mapping.js
index fa18a02a68a0c..b6c38ee8aac29 100644
--- a/src/fixtures/field_mapping.js
+++ b/src/fixtures/field_mapping.js
@@ -1,62 +1,60 @@
-define(function (require) {
- return {
- test: {
- mappings: {
- testType: {
- 'baz': {
- full_name: 'baz',
- mapping: {
- bar: {
- type: 'long'
- }
+export default {
+ test: {
+ mappings: {
+ testType: {
+ 'baz': {
+ full_name: 'baz',
+ mapping: {
+ bar: {
+ type: 'long'
}
- },
- 'foo.bar': {
- full_name: 'foo.bar',
- mapping: {
- bar: {
- type: 'string',
- }
+ }
+ },
+ 'foo.bar': {
+ full_name: 'foo.bar',
+ mapping: {
+ bar: {
+ type: 'string',
}
- },
- 'not_analyzed_field': {
- full_name: 'not_analyzed_field',
- mapping: {
- bar: {
- type: 'string',
- index: 'not_analyzed'
- }
+ }
+ },
+ 'not_analyzed_field': {
+ full_name: 'not_analyzed_field',
+ mapping: {
+ bar: {
+ type: 'string',
+ index: 'not_analyzed'
}
- },
- 'index_no_field': {
- full_name: 'index_no_field',
- mapping: {
- bar: {
- type: 'string',
- index: 'no'
- }
+ }
+ },
+ 'index_no_field': {
+ full_name: 'index_no_field',
+ mapping: {
+ bar: {
+ type: 'string',
+ index: 'no'
}
- },
- _id: {
- full_name: '_id',
- mapping: {
- _id: {
- store: false,
- index: 'no',
- }
+ }
+ },
+ _id: {
+ full_name: '_id',
+ mapping: {
+ _id: {
+ store: false,
+ index: 'no',
}
- },
- _timestamp: {
- full_name: '_timestamp',
- mapping: {
- _timestamp: {
- store: true,
- index: 'no',
- }
+ }
+ },
+ _timestamp: {
+ full_name: '_timestamp',
+ mapping: {
+ _timestamp: {
+ store: true,
+ index: 'no',
}
}
}
}
}
- };
-});
\ No newline at end of file
+ }
+};
\ No newline at end of file
diff --git a/src/fixtures/filter_skeleton.js b/src/fixtures/filter_skeleton.js
index f1eb523d428d2..c5890fb528d9e 100644
--- a/src/fixtures/filter_skeleton.js
+++ b/src/fixtures/filter_skeleton.js
@@ -1,7 +1,5 @@
-define(function (require) {
- return {
- meta: {
- index: 'logstash-*'
- }
- };
-});
+export default {
+ meta: {
+ index: 'logstash-*'
+ }
+};
diff --git a/src/fixtures/hits.js b/src/fixtures/hits.js
index 92665b504bbea..dc7811c9452b6 100644
--- a/src/fixtures/hits.js
+++ b/src/fixtures/hits.js
@@ -1,24 +1,22 @@
-define(function (require) {
- var _ = require('lodash');
- return function fitsFixture() {
- return _.map([
- {_source: {'@timestamp': 0, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 10, request: 'foo'}},
- {_source: {'@timestamp': 1, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 20, request: 'bar'}},
- {_source: {'@timestamp': 2, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'bar'}},
- {_source: {'@timestamp': 3, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
- {_source: {'@timestamp': 4, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
- {_source: {'@timestamp': 5, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
- {_source: {'@timestamp': 6, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
- {_source: {'@timestamp': 7, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
- {_source: {'@timestamp': 8, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
- {_source: {'@timestamp': 9, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
- ], function (p, i) {
- return _.merge({}, p, {
- _score: 1,
- _id: 1000 + i,
- _type: 'test',
- _index: 'test-index'
- });
+import _ from 'lodash';
+export default function fitsFixture() {
+ return _.map([
+ {_source: {'@timestamp': 0, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 10, request: 'foo'}},
+ {_source: {'@timestamp': 1, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 20, request: 'bar'}},
+ {_source: {'@timestamp': 2, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'bar'}},
+ {_source: {'@timestamp': 3, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
+ {_source: {'@timestamp': 4, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
+ {_source: {'@timestamp': 5, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
+ {_source: {'@timestamp': 6, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
+ {_source: {'@timestamp': 7, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
+ {_source: {'@timestamp': 8, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
+ {_source: {'@timestamp': 9, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
+ ], function (p, i) {
+ return _.merge({}, p, {
+ _score: 1,
+ _id: 1000 + i,
+ _type: 'test',
+ _index: 'test-index'
});
- };
-});
+ });
+};
diff --git a/src/fixtures/logstash_fields.js b/src/fixtures/logstash_fields.js
index ec6a36e33d7b1..e4f993da81470 100644
--- a/src/fixtures/logstash_fields.js
+++ b/src/fixtures/logstash_fields.js
@@ -1,37 +1,35 @@
-define(function (require) {
- function stubbedLogstashFields() {
- var sourceData = [
- { name: 'bytes', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true, count: 10 },
- { name: 'ssl', type: 'boolean', indexed: true, analyzed: true, sortable: true, filterable: true, count: 20 },
- { name: '@timestamp', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
- { name: 'time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
- { name: '@tags', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: 'utc_time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: 'phpmemory', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: 'ip', type: 'ip', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: 'request_body', type: 'attachment', indexed: true, analyzed: true, sortable: false, filterable: true },
- { name: 'point', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: false },
- { name: 'area', type: 'geo_shape', indexed: true, analyzed: true, sortable: true, filterable: false },
- { name: 'hashed', type: 'murmur3', indexed: true, analyzed: true, sortable: false, filterable: false },
- { name: 'geo.coordinates', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: true },
- { name: 'extension', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: 'machine.os', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: 'geo.src', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: '_type', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
- { name: '_id', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: true},
- { name: '_source', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: false},
- { name: 'custom_user_field', type: 'conflict', indexed: false, analyzed: false, sortable: false, filterable: true },
- { name: 'script string', type: 'string', scripted: true, script: '\'i am a string\'', lang: 'expression' },
- { name: 'script number', type: 'number', scripted: true, script: '1234', lang: 'expression' },
- { name: 'script murmur3', type: 'murmur3', scripted: true, script: '1234', lang: 'expression'},
- ].map(function (field) {
- field.count = field.count || 0;
- field.scripted = field.scripted || false;
- return field;
- });
+function stubbedLogstashFields() {
+ let sourceData = [
+ { name: 'bytes', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true, count: 10 },
+ { name: 'ssl', type: 'boolean', indexed: true, analyzed: true, sortable: true, filterable: true, count: 20 },
+ { name: '@timestamp', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
+ { name: 'time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
+ { name: '@tags', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
+ { name: 'utc_time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true },
+ { name: 'phpmemory', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true },
+ { name: 'ip', type: 'ip', indexed: true, analyzed: true, sortable: true, filterable: true },
+ { name: 'request_body', type: 'attachment', indexed: true, analyzed: true, sortable: false, filterable: true },
+ { name: 'point', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: false },
+ { name: 'area', type: 'geo_shape', indexed: true, analyzed: true, sortable: true, filterable: false },
+ { name: 'hashed', type: 'murmur3', indexed: true, analyzed: true, sortable: false, filterable: false },
+ { name: 'geo.coordinates', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: true },
+ { name: 'extension', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
+ { name: 'machine.os', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
+ { name: 'geo.src', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
+ { name: '_type', type: 'string', indexed: false, analyzed: true, sortable: true, filterable: true },
+ { name: '_id', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: true},
+ { name: '_source', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: false},
+ { name: 'custom_user_field', type: 'conflict', indexed: false, analyzed: false, sortable: false, filterable: true },
+ { name: 'script string', type: 'string', scripted: true, script: '\'i am a string\'', lang: 'expression' },
+ { name: 'script number', type: 'number', scripted: true, script: '1234', lang: 'expression' },
+ { name: 'script murmur3', type: 'murmur3', scripted: true, script: '1234', lang: 'expression'},
+ ].map(function (field) {
+ field.count = field.count || 0;
+ field.scripted = field.scripted || false;
+ return field;
+ });
- return sourceData;
- }
+ return sourceData;
+}
- return stubbedLogstashFields;
-});
+export default stubbedLogstashFields;
diff --git a/src/fixtures/mapping_with_dupes.js b/src/fixtures/mapping_with_dupes.js
index d43f8ebc8a694..c404d3ea04af2 100644
--- a/src/fixtures/mapping_with_dupes.js
+++ b/src/fixtures/mapping_with_dupes.js
@@ -1,40 +1,38 @@
-define(function (require) {
- return {
- test: {
- mappings: {
- testType: {
- 'baz': {
- full_name: 'baz',
- mapping: {
- bar: {
- type: 'long'
- }
+export default {
+ test: {
+ mappings: {
+ testType: {
+ 'baz': {
+ full_name: 'baz',
+ mapping: {
+ bar: {
+ type: 'long'
}
- },
- 'foo.bar': {
- full_name: 'foo.bar',
- mapping: {
- bar: {
- type: 'string'
- }
+ }
+ },
+ 'foo.bar': {
+ full_name: 'foo.bar',
+ mapping: {
+ bar: {
+ type: 'string'
}
}
}
}
- },
- duplicates: {
- mappings: {
- testType: {
- 'baz': {
- full_name: 'baz',
- mapping: {
- bar: {
- type: 'date'
- }
+ }
+ },
+ duplicates: {
+ mappings: {
+ testType: {
+ 'baz': {
+ full_name: 'baz',
+ mapping: {
+ bar: {
+ type: 'date'
}
}
}
}
}
- };
-});
\ No newline at end of file
+ }
+};
\ No newline at end of file
diff --git a/src/fixtures/mock_courier.js b/src/fixtures/mock_courier.js
index 07e6de0091f62..ef68696693551 100644
--- a/src/fixtures/mock_courier.js
+++ b/src/fixtures/mock_courier.js
@@ -1,17 +1,16 @@
-define(function (require) {
- var _ = require('lodash');
- var sinon = require('auto-release-sinon');
+import _ from 'lodash';
+import sinon from 'auto-release-sinon';
+import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
- return function (Private, Promise) {
- var indexPatterns = Private(require('fixtures/stubbed_logstash_index_pattern'));
- var getIndexPatternStub = sinon.stub();
- getIndexPatternStub.returns(Promise.resolve(indexPatterns));
+export default function (Private, Promise) {
+ let indexPatterns = Private(FixturesStubbedLogstashIndexPatternProvider);
+ let getIndexPatternStub = sinon.stub();
+ getIndexPatternStub.returns(Promise.resolve(indexPatterns));
- var courier = {
- indexPatterns: { get: getIndexPatternStub },
- getStub: getIndexPatternStub
- };
-
- return courier;
+ let courier = {
+ indexPatterns: { get: getIndexPatternStub },
+ getStub: getIndexPatternStub
};
-});
+
+ return courier;
+};
diff --git a/src/fixtures/mock_state.js b/src/fixtures/mock_state.js
index 1346b8386ec02..43d579efe8edc 100644
--- a/src/fixtures/mock_state.js
+++ b/src/fixtures/mock_state.js
@@ -1,18 +1,17 @@
-define(function (require) {
- var _ = require('lodash');
- var sinon = require('auto-release-sinon');
+import _ from 'lodash';
+import sinon from 'auto-release-sinon';
- function MockState(defaults) {
- this.on = _.noop;
- this.off = _.noop;
- this.save = sinon.stub();
- _.assign(this, defaults);
- }
+function MockState(defaults) {
+ this.on = _.noop;
+ this.off = _.noop;
+ this.save = sinon.stub();
+ this.replace = sinon.stub();
+ _.assign(this, defaults);
+}
- MockState.prototype.resetStub = function () {
- this.save = sinon.stub();
- return this;
- };
+MockState.prototype.resetStub = function () {
+ this.save = sinon.stub();
+ return this;
+};
- return MockState;
-});
+export default MockState;
diff --git a/src/fixtures/mock_ui_state.js b/src/fixtures/mock_ui_state.js
new file mode 100644
index 0000000000000..50d913ae337e8
--- /dev/null
+++ b/src/fixtures/mock_ui_state.js
@@ -0,0 +1,13 @@
+import _ from 'lodash';
+const keys = {}; // shared backing store for get/set
+export default {
+  get: function (path, def) {
+    return keys[path] == null ? def : keys[path];
+  },
+  set: function (path, val) {
+    keys[path] = val;
+    return val;
+  },
+  on: _.noop,
+  off: _.noop
+};
diff --git a/src/fixtures/real_hits.js b/src/fixtures/real_hits.js
index 55047c2f41da2..6e47b15e6c33d 100644
--- a/src/fixtures/real_hits.js
+++ b/src/fixtures/real_hits.js
@@ -1,227 +1,225 @@
-define(function (require) {
- /*
- Extensions:
- gif: 5
- html: 8
- php: 5 (thus 5 with phpmemory fields)
- png: 2
+/*
+ Extensions:
+ gif: 5
+ html: 8
+ php: 5 (thus 5 with phpmemory fields)
+ png: 2
- _type:
- apache: 18
- nginx: 2
+ _type:
+ apache: 18
+ nginx: 2
- Bytes (all unique except):
- 374: 2
+ Bytes (all unique except):
+ 374: 2
- All have the same index, ids are unique
- */
+ All have the same index, ids are unique
+*/
- return [
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '61',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 360.20000000000005
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '388',
- '_score': 1,
- '_source': {
- 'extension': 'gif',
- 'bytes': 5848.700000000001
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '403',
- '_score': 1,
- '_source': {
- 'extension': 'png',
- 'bytes': 841.6
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '415',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 1626.4
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '460',
- '_score': 1,
- '_source': {
- 'extension': 'php',
- 'bytes': 2070.6,
- 'phpmemory': 276080
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '496',
- '_score': 1,
- '_source': {
- 'extension': 'gif',
- 'bytes': 8421.6
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '511',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 994.8000000000001
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '701',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 374
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '838',
- '_score': 1,
- '_source': {
- 'extension': 'php',
- 'bytes': 506.09999999999997,
- 'phpmemory': 67480
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '890',
- '_score': 1,
- '_source': {
- 'extension': 'php',
- 'bytes': 506.09999999999997,
- 'phpmemory': 67480
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'nginx',
- '_id': '927',
- '_score': 1,
- '_source': {
- 'extension': 'php',
- 'bytes': 2591.1,
- 'phpmemory': 345480
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1034',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 1450
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1142',
- '_score': 1,
- '_source': {
- 'extension': 'php',
- 'bytes': 1803.8999999999999,
- 'phpmemory': 240520
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1180',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 1626.4
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'nginx',
- '_id': '1224',
- '_score': 1,
- '_source': {
- 'extension': 'gif',
- 'bytes': 10617.2
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1243',
- '_score': 1,
- '_source': {
- 'extension': 'gif',
- 'bytes': 10961.5
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1510',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 382.8
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1628',
- '_score': 1,
- '_source': {
- 'extension': 'html',
- 'bytes': 374
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1729',
- '_score': 1,
- '_source': {
- 'extension': 'png',
- 'bytes': 3059.2000000000003
- }
- },
- {
- '_index': 'logstash-2014.09.09',
- '_type': 'apache',
- '_id': '1945',
- '_score': 1,
- '_source': {
- 'extension': 'gif',
- 'bytes': 10617.2
- }
- }
- ];
-});
+export default [
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '61',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 360.20000000000005
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '388',
+ '_score': 1,
+ '_source': {
+ 'extension': 'gif',
+ 'bytes': 5848.700000000001
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '403',
+ '_score': 1,
+ '_source': {
+ 'extension': 'png',
+ 'bytes': 841.6
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '415',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 1626.4
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '460',
+ '_score': 1,
+ '_source': {
+ 'extension': 'php',
+ 'bytes': 2070.6,
+ 'phpmemory': 276080
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '496',
+ '_score': 1,
+ '_source': {
+ 'extension': 'gif',
+ 'bytes': 8421.6
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '511',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 994.8000000000001
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '701',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 374
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '838',
+ '_score': 1,
+ '_source': {
+ 'extension': 'php',
+ 'bytes': 506.09999999999997,
+ 'phpmemory': 67480
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '890',
+ '_score': 1,
+ '_source': {
+ 'extension': 'php',
+ 'bytes': 506.09999999999997,
+ 'phpmemory': 67480
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'nginx',
+ '_id': '927',
+ '_score': 1,
+ '_source': {
+ 'extension': 'php',
+ 'bytes': 2591.1,
+ 'phpmemory': 345480
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1034',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 1450
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1142',
+ '_score': 1,
+ '_source': {
+ 'extension': 'php',
+ 'bytes': 1803.8999999999999,
+ 'phpmemory': 240520
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1180',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 1626.4
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'nginx',
+ '_id': '1224',
+ '_score': 1,
+ '_source': {
+ 'extension': 'gif',
+ 'bytes': 10617.2
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1243',
+ '_score': 1,
+ '_source': {
+ 'extension': 'gif',
+ 'bytes': 10961.5
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1510',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 382.8
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1628',
+ '_score': 1,
+ '_source': {
+ 'extension': 'html',
+ 'bytes': 374
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1729',
+ '_score': 1,
+ '_source': {
+ 'extension': 'png',
+ 'bytes': 3059.2000000000003
+ }
+ },
+ {
+ '_index': 'logstash-2014.09.09',
+ '_type': 'apache',
+ '_id': '1945',
+ '_score': 1,
+ '_source': {
+ 'extension': 'gif',
+ 'bytes': 10617.2
+ }
+ }
+];
diff --git a/src/fixtures/search_response.js b/src/fixtures/search_response.js
index 3c0b495da06a4..a035e7ccf2e32 100644
--- a/src/fixtures/search_response.js
+++ b/src/fixtures/search_response.js
@@ -1,18 +1,16 @@
-define(function (require) {
- var hits = require('fixtures/real_hits');
+import hits from 'fixtures/real_hits';
- return {
- took: 73,
- timed_out: false,
- _shards: {
- total: 144,
- successful: 144,
- failed: 0
- },
- hits: {
- total : 49487,
- max_score : 1.0,
- hits: hits
- }
- };
-});
\ No newline at end of file
+export default {
+ took: 73,
+ timed_out: false,
+ _shards: {
+ total: 144,
+ successful: 144,
+ failed: 0
+ },
+ hits: {
+ total : 49487,
+ max_score : 1.0,
+ hits: hits
+ }
+};
\ No newline at end of file
diff --git a/src/fixtures/stubbed_doc_source_response.js b/src/fixtures/stubbed_doc_source_response.js
index a26f73c1f7786..5bed6d41af348 100644
--- a/src/fixtures/stubbed_doc_source_response.js
+++ b/src/fixtures/stubbed_doc_source_response.js
@@ -1,22 +1,22 @@
-define(function (require) {
- function stubbedDocSourceResponse(Private) {
- var mockLogstashFields = Private(require('fixtures/logstash_fields'));
+import FixturesLogstashFieldsProvider from 'fixtures/logstash_fields';
- return function (id, index) {
- index = index || '.kibana';
- return {
- _id: id,
- _index: index,
- _type: 'index-pattern',
- _version: 2,
- found: true,
- _source: {
- customFormats: '{}',
- fields: JSON.stringify(mockLogstashFields)
- }
- };
+function stubbedDocSourceResponse(Private) {
+ let mockLogstashFields = Private(FixturesLogstashFieldsProvider);
+
+ return function (id, index) {
+ index = index || '.kibana';
+ return {
+ _id: id,
+ _index: index,
+ _type: 'index-pattern',
+ _version: 2,
+ found: true,
+ _source: {
+ customFormats: '{}',
+ fields: JSON.stringify(mockLogstashFields)
+ }
};
- }
+ };
+}
- return stubbedDocSourceResponse;
-});
\ No newline at end of file
+export default stubbedDocSourceResponse;
\ No newline at end of file
diff --git a/src/fixtures/stubbed_logstash_index_pattern.js b/src/fixtures/stubbed_logstash_index_pattern.js
index 2b114919a81e5..8377132b6d8ea 100644
--- a/src/fixtures/stubbed_logstash_index_pattern.js
+++ b/src/fixtures/stubbed_logstash_index_pattern.js
@@ -1,24 +1,25 @@
-define(function (require) {
- return function stubbedLogstashIndexPatternService(Private) {
- var StubIndexPattern = Private(require('testUtils/stubIndexPattern'));
- var fieldTypes = Private(require('ui/index_patterns/_field_types'));
- var mockLogstashFields = Private(require('fixtures/logstash_fields'));
+import _ from 'lodash';
+import TestUtilsStubIndexPatternProvider from 'test_utils/stub_index_pattern';
+import IndexPatternsFieldTypesProvider from 'ui/index_patterns/_field_types';
+import FixturesLogstashFieldsProvider from 'fixtures/logstash_fields';
+export default function stubbedLogstashIndexPatternService(Private) {
+ let StubIndexPattern = Private(TestUtilsStubIndexPatternProvider);
+ let fieldTypes = Private(IndexPatternsFieldTypesProvider);
+ let mockLogstashFields = Private(FixturesLogstashFieldsProvider);
- var _ = require('lodash');
- var fields = mockLogstashFields.map(function (field) {
- field.displayName = field.name;
- var type = fieldTypes.byName[field.type];
- if (!type) throw new TypeError('unknown type ' + field.type);
- if (!_.has(field, 'sortable')) field.sortable = type.sortable;
- if (!_.has(field, 'filterable')) field.filterable = type.filterable;
- return field;
- });
+ let fields = mockLogstashFields.map(function (field) {
+ field.displayName = field.name;
+ let type = fieldTypes.byName[field.type];
+ if (!type) throw new TypeError('unknown type ' + field.type);
+ if (!_.has(field, 'sortable')) field.sortable = type.sortable;
+ if (!_.has(field, 'filterable')) field.filterable = type.filterable;
+ return field;
+ });
- var indexPattern = new StubIndexPattern('logstash-*', 'time', fields);
- indexPattern.id = 'logstash-*';
+ let indexPattern = new StubIndexPattern('logstash-*', 'time', fields);
+ indexPattern.id = 'logstash-*';
- return indexPattern;
+ return indexPattern;
- };
-});
+};
diff --git a/src/fixtures/stubbed_search_source.js b/src/fixtures/stubbed_search_source.js
index d80e52c6cf5c3..118aaede15047 100644
--- a/src/fixtures/stubbed_search_source.js
+++ b/src/fixtures/stubbed_search_source.js
@@ -1,36 +1,38 @@
-define(function (require) {
- var sinon = require('auto-release-sinon');
- var searchResponse = require('fixtures/search_response');
+import sinon from 'auto-release-sinon';
+import searchResponse from 'fixtures/search_response';
+import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
- return function stubSearchSource(Private, $q) {
- var deferedResult = $q.defer();
-
- return {
- sort: sinon.spy(),
- size: sinon.spy(),
- fetch: sinon.spy(),
- destroy: sinon.spy(),
- get: function (param) {
- switch (param) {
- case 'index':
- return Private(require('fixtures/stubbed_logstash_index_pattern'));
- default:
- throw new Error('Param "' + param + '" is not implemented in the stubbed search source');
- }
- },
- crankResults: function () {
- deferedResult.resolve(searchResponse);
- deferedResult = $q.defer();
- },
- onResults: function () {
- // Up to the test to resolve this manually
- // For example:
- // someHandler.resolve(require('fixtures/search_response'))
- return deferedResult.promise;
- },
- onError: function () { return $q.defer().promise; },
-
- };
+export default function stubSearchSource(Private, $q, Promise) {
+ let deferedResult = $q.defer();
+ let indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
+ return {
+ sort: sinon.spy(),
+ size: sinon.spy(),
+ fetch: sinon.spy(),
+ destroy: sinon.spy(),
+ get: function (param) {
+ switch (param) {
+ case 'index':
+ return indexPattern;
+ default:
+ throw new Error('Param "' + param + '" is not implemented in the stubbed search source');
+ }
+ },
+ crankResults: function () {
+ deferedResult.resolve(searchResponse);
+ deferedResult = $q.defer();
+ },
+ onResults: function () {
+ // Up to the test to resolve this manually
+ // For example:
+ // someHandler.resolve(require('fixtures/search_response'))
+ return deferedResult.promise;
+ },
+ onError: function () { return $q.defer().promise; },
+ _flatten: function () {
+ return Promise.resolve({ index: indexPattern, body: {} });
+ }
};
-});
+
+};
diff --git a/src/fixtures/tilemap_map.js b/src/fixtures/tilemap_map.js
index 5d07de8349212..029bf1ea8afdc 100644
--- a/src/fixtures/tilemap_map.js
+++ b/src/fixtures/tilemap_map.js
@@ -1,21 +1,19 @@
-define(function (require) {
- var sinon = require('auto-release-sinon');
+import sinon from 'auto-release-sinon';
- function MockMap(container, chartData, params) {
- this.container = container;
- this.chartData = chartData;
- this.params = params;
+function MockMap(container, chartData, params) {
+ this.container = container;
+ this.chartData = chartData;
+ this.params = params;
- // stub required methods
- this.addStubs();
- }
+ // stub required methods
+ this.addStubs();
+}
- MockMap.prototype.addStubs = function () {
- this.addTitle = sinon.stub();
- this.addFitControl = sinon.stub();
- this.addBoundingControl = sinon.stub();
- this.destroy = sinon.stub();
- };
+MockMap.prototype.addStubs = function () {
+ this.addTitle = sinon.stub();
+ this.addFitControl = sinon.stub();
+ this.addBoundingControl = sinon.stub();
+ this.destroy = sinon.stub();
+};
- return MockMap;
-});
\ No newline at end of file
+export default MockMap;
\ No newline at end of file
diff --git a/src/fixtures/vislib/_vis_fixture.js b/src/fixtures/vislib/_vis_fixture.js
index d7e74a634f3c0..8f06190120053 100644
--- a/src/fixtures/vislib/_vis_fixture.js
+++ b/src/fixtures/vislib/_vis_fixture.js
@@ -1,9 +1,22 @@
-var $ = require('jquery');
-var _ = require('lodash');
+import _ from 'lodash';
+import $ from 'jquery';
+import VislibVisProvider from 'ui/vislib/vis';
-var $visCanvas = $('
The Console UI is split into two panes: an editor pane (left) and a response pane (right).
+ Use the editor to type requests and submit them to Elasticsearch. The results will be displayed in
+ the response pane on the right side.
+
+
+
Console understands requests in a compact format, similar to cURL:
+
+
+
While typing a request, Console will make suggestions which you can than accept by hitting Enter/Tab.
+ These suggestions are made based on the request structure as well as your indices and types.
+
+
+
+
A few quick tips, while I have your attention
+
+
Submit requests to ES using the green triangle button.
+
Use the wrench menu for other useful things.
+
You can paste requests in cURL format and they will be translated to the Console syntax.
+
You can resize the editor and output panes by dragging the separator between them.
+
Study the keyboard shortcuts under the Help button. Good stuff in there!
+
+
+
diff --git a/src/plugins/console/public/src/es.js b/src/plugins/console/public/src/es.js
new file mode 100644
index 0000000000000..5a18590f2391d
--- /dev/null
+++ b/src/plugins/console/public/src/es.js
@@ -0,0 +1,48 @@
+let _ = require('lodash');
+let $ = require('jquery');
+
+let esVersion = [];
+
+module.exports.getVersion = function () {
+ return esVersion;
+};
+
+module.exports.send = function (method, path, data, server, disable_auth_alert) {
+ var wrappedDfd = $.Deferred();
+
+ console.log("Calling " + path);
+ if (data && method == "GET") {
+ method = "POST";
+ }
+
+ // delayed loading for circular references
+ var settings = require("./settings");
+
+ var options = {
+ url: '../api/console/proxy?uri=' + encodeURIComponent(path),
+ data: method == "GET" ? null : data,
+ cache: false,
+ crossDomain: true,
+ type: method,
+ dataType: "text", // disable automatic guessing
+ };
+
+
+ $.ajax(options).then(
+ function (data, textStatus, jqXHR) {
+ wrappedDfd.resolveWith(this, [data, textStatus, jqXHR]);
+ },
+ function (jqXHR, textStatus, errorThrown) {
+ if (jqXHR.status == 0) {
+ jqXHR.responseText = "\n\nFailed to connect to Console's backend.\nPlease check the Kibana server is up and running";
+ }
+ wrappedDfd.rejectWith(this, [jqXHR, textStatus, errorThrown]);
+ });
+ return wrappedDfd;
+};
+
+module.exports.constructESUrl = function (baseUri, path) {
+ baseUri = baseUri.replace(/\/+$/, '');
+ path = path.replace(/^\/+/, '');
+ return baseUri + '/' + path;
+};
diff --git a/src/plugins/console/public/src/history.js b/src/plugins/console/public/src/history.js
new file mode 100644
index 0000000000000..eb32cc3fa87db
--- /dev/null
+++ b/src/plugins/console/public/src/history.js
@@ -0,0 +1,64 @@
+const $ = require('jquery');
+const { uniq } = require('lodash');
+const storage = require('./storage');
+const chrome = require('ui/chrome');
+
+const history = module.exports = {
+ restoreFromHistory() {
+ // default method for history.restoreFromHistory
+ // replace externally to do something when the user chooses
+ // to relive a bit of history
+ throw new Error('not implemented');
+ },
+
+ getHistoryKeys() {
+ return storage.keys()
+ .filter(key => key.indexOf('hist_elem') === 0)
+ .sort()
+ .reverse();
+ },
+
+ getHistory() {
+ return history
+ .getHistoryKeys()
+ .map(key => storage.get(key));
+ },
+
+ addToHistory(endpoint, method, data) {
+ var keys = history.getHistoryKeys();
+ keys.splice(0, 500); // only maintain most recent X;
+ $.each(keys, function (i, k) {
+ storage.delete(k);
+ });
+
+ var timestamp = new Date().getTime();
+ var k = "hist_elem_" + timestamp;
+ storage.set(k, {
+ time: timestamp,
+ endpoint: endpoint,
+ method: method,
+ data: data
+ });
+ },
+
+ updateCurrentState(content) {
+ var timestamp = new Date().getTime();
+ storage.set("editor_state", {
+ time: timestamp,
+ content: content
+ });
+ },
+
+ getSavedEditorState() {
+ const saved = storage.get('editor_state');
+ if (!saved) return;
+ const { time, content } = saved;
+ return { time, content };
+ },
+
+ clearHistory($el) {
+ history
+ .getHistoryKeys()
+ .forEach(key => storage.delete(key));
+ }
+};
diff --git a/src/plugins/console/public/src/input.js b/src/plugins/console/public/src/input.js
new file mode 100644
index 0000000000000..97a393c04ec61
--- /dev/null
+++ b/src/plugins/console/public/src/input.js
@@ -0,0 +1,244 @@
+let ace = require('ace');
+let $ = require('jquery');
+let ZeroClipboard = require('zeroclip');
+let ext_searchbox = require('ace/ext-searchbox');
+let Autocomplete = require('./autocomplete');
+let mappings = require('./mappings');
+let output = require('./output');
+let SenseEditor = require('./sense_editor/editor');
+let settings = require('./settings');
+let storage = require('./storage');
+let utils = require('./utils');
+let es = require('./es');
+let history = require('./history');
+import uiModules from 'ui/modules';
+
+var $el = $('#editor');
+var input = new SenseEditor($el);
+uiModules.get('app/sense').setupResizeCheckerForRootEditors($el, input, output);
+
+input.autocomplete = new Autocomplete(input);
+
+input.$actions = $("#editor_actions");
+
+input.commands.addCommand({
+ name: 'auto indent request',
+ bindKey: {win: 'Ctrl-I', mac: 'Command-I'},
+ exec: function () {
+ input.autoIndent();
+ }
+});
+input.commands.addCommand({
+ name: 'move to previous request start or end',
+ bindKey: {win: 'Ctrl-Up', mac: 'Command-Up'},
+ exec: function () {
+ input.moveToPreviousRequestEdge()
+ }
+});
+input.commands.addCommand({
+ name: 'move to next request start or end',
+ bindKey: {win: 'Ctrl-Down', mac: 'Command-Down'},
+ exec: function () {
+ input.moveToNextRequestEdge()
+ }
+});
+
+
+/**
+ * COPY AS CURL
+ *
+ * Since the copy functionality is powered by a flash movie (via ZeroClipboard)
+ * the only way to trigger the copy is with a litteral mouseclick from the user.
+ *
+ * The original shortcut will now just open the menu and highlight the
+ *
+ */
+var $copyAsCURL = $('#copy_as_curl');
+var zc = (function setupZeroClipboard() {
+ var zc = new ZeroClipboard($copyAsCURL); // the ZeroClipboard instance
+
+ zc.on('wrongflash noflash', function () {
+ if (!storage.get('flash_warning_shown')) {
+ alert('Console needs flash version 10.0 or greater in order to provide "Copy as cURL" functionality');
+ storage.set('flash_warning_shown', 'true');
+ }
+ $copyAsCURL.hide();
+ });
+
+ zc.on('ready', function () {
+ function setupCopyButton(cb) {
+ cb = typeof cb === 'function' ? cb : $.noop;
+ $copyAsCURL.css('visibility', 'hidden');
+ input.getRequestsAsCURL(function (curl) {
+ $copyAsCURL.attr('data-clipboard-text', curl);
+ $copyAsCURL.css('visibility', 'visible');
+ cb();
+ });
+ }
+
+ input.$actions.on('mouseenter', function () {
+ if (!$(this).hasClass('open')) {
+ setupCopyButton();
+ }
+ });
+ });
+
+ zc.on('complete', function () {
+ $copyAsCURL.click();
+ input.focus();
+ });
+
+ return zc;
+}());
+
+/**
+ * Setup the "send" shortcut
+ */
+
+var CURRENT_REQ_ID = 0;
+
+function sendCurrentRequestToES() {
+
+ var req_id = ++CURRENT_REQ_ID;
+
+ input.getRequestsInRange(function (requests) {
+ if (req_id != CURRENT_REQ_ID) {
+ return;
+ }
+ output.update('');
+
+ if (requests.length == 0) {
+ return;
+ }
+
+ var isMultiRequest = requests.length > 1;
+ var finishChain = function () { /* noop */ };
+
+ var isFirstRequest = true;
+
+ var sendNextRequest = function () {
+ if (req_id != CURRENT_REQ_ID) {
+ return;
+ }
+ if (requests.length == 0) {
+ finishChain();
+ return;
+ }
+ var req = requests.shift();
+ var es_path = req.url;
+ var es_method = req.method;
+ var es_data = req.data.join("\n");
+ if (es_data) {
+ es_data += "\n";
+ } //append a new line for bulk requests.
+
+ es.send(es_method, es_path, es_data).always(function (dataOrjqXHR, textStatus, jqXhrORerrorThrown) {
+ if (req_id != CURRENT_REQ_ID) {
+ return;
+ }
+ var xhr;
+ if (dataOrjqXHR.promise) {
+ xhr = dataOrjqXHR;
+ }
+ else {
+ xhr = jqXhrORerrorThrown;
+ }
+ function modeForContentType(contentType) {
+ if (contentType.indexOf("text/plain") >= 0) {
+ return "ace/mode/text";
+ }
+ else if (contentType.indexOf("application/yaml") >= 0) {
+ return "ace/mode/yaml";
+ }
+ return null;
+ }
+
+ if (typeof xhr.status == "number" &&
+ // things like DELETE index where the index is not there are OK.
+ ((xhr.status >= 200 && xhr.status < 300) || xhr.status == 404)
+ ) {
+ // we have someone on the other side. Add to history
+ history.addToHistory(es_path, es_method, es_data);
+
+
+ let value = xhr.responseText;
+ let mode = modeForContentType(xhr.getAllResponseHeaders("Content-Type") || "");
+
+ if (mode === null || mode === "application/json") {
+ // assume json - auto pretty
+ try {
+ value = JSON.stringify(JSON.parse(value), null, 2);
+ }
+ catch (e) {
+
+ }
+ }
+
+ if (isMultiRequest) {
+ value = "# " + req.method + " " + req.url + "\n" + value;
+ }
+ if (isFirstRequest) {
+ output.update(value, mode);
+ }
+ else {
+ output.append("\n" + value);
+ }
+ isFirstRequest = false;
+ // single request terminate via sendNextRequest as well
+ sendNextRequest();
+ }
+ else {
+ let value, mode;
+ if (xhr.responseText) {
+ value = xhr.responseText; // ES error should be shown
+ mode = modeForContentType(xhr.getAllResponseHeaders("Content-Type") || "");
+ if (value[0] == "{") {
+ try {
+ value = JSON.stringify(JSON.parse(value), null, 2);
+ }
+ catch (e) {
+ }
+ }
+ } else {
+ value = "Request failed to get to the server (status code: " + xhr.status + ")";
+ mode = 'ace/mode/text';
+ }
+ if (isMultiRequest) {
+ value = "# " + req.method + " " + req.url + "\n" + value;
+ }
+ if (isFirstRequest) {
+ output.update(value, mode);
+ }
+ else {
+ output.append("\n" + value);
+ }
+ finishChain();
+ }
+ });
+ };
+
+ sendNextRequest();
+ });
+}
+
+
+input.commands.addCommand({
+ name: 'send to elasticsearch',
+ bindKey: {win: 'Ctrl-Enter', mac: 'Command-Enter'},
+ exec: sendCurrentRequestToES
+});
+
+
+/**
+ * Init the editor
+ */
+if (settings) {
+ settings.applyCurrentSettings(input);
+}
+input.focus();
+input.highlightCurrentRequestsAndUpdateActionBar();
+
+input.sendCurrentRequestToES = sendCurrentRequestToES;
+require('./input_resize')(input, output);
+
+module.exports = input;
diff --git a/src/plugins/console/public/src/input_resize.js b/src/plugins/console/public/src/input_resize.js
new file mode 100644
index 0000000000000..83688cb4f9cf1
--- /dev/null
+++ b/src/plugins/console/public/src/input_resize.js
@@ -0,0 +1,47 @@
+const $ = require('jquery');
+const storage = require('./storage');
+
+module.exports = function (input, output) {
+
+ const $left = input.$el.parent();
+
+ function readStoredEditorWidth() {
+ return storage.get('editorWidth');
+ }
+
+ function storeEditorWidth(editorWidth) {
+ storage.set('editorWidth', editorWidth);
+ }
+
+ function setEditorWidth(editorWidth) {
+ storeEditorWidth(editorWidth);
+ $left.width(editorWidth);
+ }
+
+ var $resizer = $('#editor_resizer');
+ $resizer
+ .on('mousedown', function (event) {
+ $resizer.addClass('active');
+ var startWidth = $left.width();
+ var startX = event.pageX;
+
+ function onMove(event) {
+ setEditorWidth(startWidth + event.pageX - startX)
+ }
+
+ $(document.body)
+ .on('mousemove', onMove)
+ .one('mouseup', function () {
+ $resizer.removeClass('active');
+ $(this).off('mousemove', onMove);
+ input.resize();
+ output.resize();
+ });
+ });
+
+ const initialEditorWidth = readStoredEditorWidth();
+ if (initialEditorWidth != null) {
+ setEditorWidth(initialEditorWidth);
+ }
+
+}
diff --git a/src/plugins/console/public/src/kb.js b/src/plugins/console/public/src/kb.js
new file mode 100644
index 0000000000000..b005d25f8f4da
--- /dev/null
+++ b/src/plugins/console/public/src/kb.js
@@ -0,0 +1,252 @@
+let $ = require('jquery');
+let _ = require('lodash');
+let mappings = require('./mappings');
+let es = require('./es');
+let Api = require('./kb/api');
+let autocomplete_engine = require('./autocomplete/engine');
+
+var ACTIVE_API = new Api();
+
+function nonValidIndexType(token) {
+ return !(token === "_all" || token[0] !== "_");
+}
+
+function IndexAutocompleteComponent(name, parent, multi_valued) {
+ autocomplete_engine.ListComponent.call(this, name, mappings.getIndices, parent, multi_valued);
+}
+
+IndexAutocompleteComponent.prototype = _.create(
+ autocomplete_engine.ListComponent.prototype,
+ {'constructor': IndexAutocompleteComponent});
+
+(function (cls) {
+ cls.validateTokens = function (tokens) {
+ if (!this.multi_valued && tokens.length > 1) {
+ return false;
+ }
+ return !_.find(tokens, nonValidIndexType);
+ };
+
+ cls.getDefaultTermMeta = function () {
+ return "index"
+ };
+
+ cls.getContextKey = function () {
+ return "indices";
+ };
+})(IndexAutocompleteComponent.prototype);
+
+
+function TypeGenerator(context) {
+ return mappings.getTypes(context.indices);
+}
+
+function TypeAutocompleteComponent(name, parent, multi_valued) {
+ autocomplete_engine.ListComponent.call(this, name, TypeGenerator, parent, multi_valued);
+}
+
+TypeAutocompleteComponent.prototype = _.create(
+ autocomplete_engine.ListComponent.prototype,
+ {'constructor': TypeAutocompleteComponent});
+
+(function (cls) {
+ cls.validateTokens = function (tokens) {
+ if (!this.multi_valued && tokens.length > 1) {
+ return false;
+ }
+
+ return !_.find(tokens, nonValidIndexType);
+ };
+
+ cls.getDefaultTermMeta = function () {
+ return "type"
+ };
+
+ cls.getContextKey = function () {
+ return "types";
+ };
+})(TypeAutocompleteComponent.prototype);
+
+function FieldGenerator(context) {
+ return _.map(mappings.getFields(context.indices, context.types), function (field) {
+ return {name: field.name, meta: field.type};
+ });
+}
+
+function FieldAutocompleteComponent(name, parent, multi_valued) {
+ autocomplete_engine.ListComponent.call(this, name, FieldGenerator, parent, multi_valued);
+}
+
+FieldAutocompleteComponent.prototype = _.create(
+ autocomplete_engine.ListComponent.prototype,
+ {'constructor': FieldAutocompleteComponent});
+
+(function (cls) {
+ cls.validateTokens = function (tokens) {
+ if (!this.multi_valued && tokens.length > 1) {
+ return false;
+ }
+
+ return !_.find(tokens, function (token) {
+ return token.match(/[^\w.?*]/);
+ });
+ };
+
+ cls.getDefaultTermMeta = function () {
+ return "field"
+ };
+
+ cls.getContextKey = function () {
+ return "fields";
+ };
+})(FieldAutocompleteComponent.prototype);
+
+
+function IdAutocompleteComponent(name, parent, multi) {
+ autocomplete_engine.SharedComponent.call(this, name, parent);
+ this.multi_match = multi
+}
+
+IdAutocompleteComponent.prototype = _.create(
+ autocomplete_engine.SharedComponent.prototype,
+ {'constructor': IdAutocompleteComponent});
+
+(function (cls) {
+ cls.match = function (token, context, editor) {
+ if (!token) {
+ return null;
+ }
+ if (!this.multi_match && _.isArray(token)) {
+ return null;
+ }
+ token = _.isArray(token) ? token : [token];
+ if (_.find(token, function (t) {
+ return t.match(/[\/,]/);
+ })) {
+ return null;
+ }
+ var r = Object.getPrototypeOf(cls).match.call(this, token, context, editor);
+ r.context_values = r.context_values || {};
+ r.context_values['id'] = token;
+ return r;
+ };
+})(IdAutocompleteComponent.prototype);
+
+var parametrizedComponentFactories = {
+
+ 'index': function (name, parent, endpoint) {
+ return new IndexAutocompleteComponent(name, parent, false);
+ },
+ 'indices': function (name, parent, endpoint) {
+ return new IndexAutocompleteComponent(name, parent, true);
+ },
+ 'type': function (name, parent, endpoint) {
+ return new TypeAutocompleteComponent(name, parent, false);
+ },
+ 'types': function (name, parent, endpoint) {
+ return new TypeAutocompleteComponent(name, parent, true);
+ },
+ 'id': function (name, parent, endpoint) {
+ return new IdAutocompleteComponent(name, parent);
+ },
+ 'ids': function (name, parent, endpoint) {
+ return new IdAutocompleteComponent(name, parent, true);
+ },
+ 'fields': function (name, parent, endpoint) {
+ return new FieldAutocompleteComponent(name, parent, true);
+ },
+ 'field': function (name, parent, endpoint) {
+ return new FieldAutocompleteComponent(name, parent, false);
+ },
+ 'nodes': function (name, parent, endpoint) {
+ return new autocomplete_engine.ListComponent(name, ["_local", "_master", "data:true", "data:false",
+ "master:true", "master:false"], parent)
+ },
+ 'node': function (name, parent, endpoint) {
+ return new autocomplete_engine.ListComponent(name, [], parent, false)
+ }
+};
+
+
+function expandAliases(indices) {
+ if (indices && indices.length > 0) {
+ indices = mappings.expandAliases(indices);
+ }
+ return indices;
+}
+
+function getUnmatchedEndpointComponents() {
+ return ACTIVE_API.getUnmatchedEndpointComponents();
+}
+
+function getEndpointDescriptionByEndpoint(endpoint) {
+ return ACTIVE_API.getEndpointDescriptionByEndpoint(endpoint)
+}
+
+function getEndpointBodyCompleteComponents(endpoint) {
+ var desc = getEndpointDescriptionByEndpoint(endpoint);
+ if (!desc) {
+ throw new Error("failed to resolve endpoint ['" + endpoint + "']");
+ }
+ return desc.bodyAutocompleteRootComponents;
+}
+
+function getTopLevelUrlCompleteComponents() {
+ return ACTIVE_API.getTopLevelUrlCompleteComponents();
+}
+
+function getGlobalAutocompleteComponents(term, throwOnMissing) {
+ return ACTIVE_API.getGlobalAutocompleteComponents(term, throwOnMissing);
+}
+
+function loadApisFromJson(json, urlParametrizedComponentFactories, bodyParametrizedComponentFactories) {
+ urlParametrizedComponentFactories = urlParametrizedComponentFactories || parametrizedComponentFactories;
+ bodyParametrizedComponentFactories = bodyParametrizedComponentFactories || urlParametrizedComponentFactories;
+ let api = new Api(urlParametrizedComponentFactories, bodyParametrizedComponentFactories);
+ let names = [];
+ _.each(json, function (apiJson, name) {
+ names.unshift(name);
+ _.each(apiJson.globals || {}, function (globalJson, globalName) {
+ api.addGlobalAutocompleteRules(globalName, globalJson);
+ });
+ _.each(apiJson.endpoints || {}, function (endpointJson, endpointName) {
+ api.addEndpointDescription(endpointName, endpointJson);
+ });
+ });
+ api.name = names.join(",");
+ return api;
+}
+
+function setActiveApi(api) {
+ if (_.isString(api)) {
+ $.ajax({
+ url: '../api/console/api_server?sense_version=' + encodeURIComponent('@@SENSE_VERSION') + "&apis=" + encodeURIComponent(api),
+ dataType: "json", // disable automatic guessing
+ }
+ ).then(
+ function (data, textStatus, jqXHR) {
+ setActiveApi(loadApisFromJson(data));
+ },
+ function (jqXHR) {
+ console.log("failed to load API '" + api + "': " + jqXHR.responseText);
+ });
+ return;
+
+ }
+ console.log("setting active api to [" + api.name + "]");
+
+ ACTIVE_API = api;
+}
+
+setActiveApi('es_5_0');
+
+module.exports.setActiveApi = setActiveApi;
+module.exports.getGlobalAutocompleteComponents = getGlobalAutocompleteComponents;
+module.exports.getEndpointDescriptionByEndpoint = getEndpointDescriptionByEndpoint;
+module.exports.getEndpointBodyCompleteComponents = getEndpointBodyCompleteComponents;
+module.exports.getTopLevelUrlCompleteComponents = getTopLevelUrlCompleteComponents;
+module.exports.getUnmatchedEndpointComponents = getUnmatchedEndpointComponents;
+
+module.exports._test = {
+ loadApisFromJson: loadApisFromJson
+};
diff --git a/src/plugins/console/public/src/kb/api.js b/src/plugins/console/public/src/kb/api.js
new file mode 100644
index 0000000000000..6e108495a8afa
--- /dev/null
+++ b/src/plugins/console/public/src/kb/api.js
@@ -0,0 +1,76 @@
+let _ = require('lodash');
+let url_pattern_matcher = require('../autocomplete/url_pattern_matcher');
+let url_params = require('../autocomplete/url_params');
+let body_completer = require('../autocomplete/body_completer');
+
/**
 * Holds the autocomplete knowledge base compiled from a set of API definitions.
 *
 * @param urlParametrizedComponentFactories a dictionary of factory functions
 *    used as a fallback for parametrized url path parts (i.e., {indices}).
 *    See url_pattern_matcher.UrlPatternMatcher.
 * @param bodyParametrizedComponentFactories same as urlParametrizedComponentFactories,
 *    but used when compiling request body descriptions.
 * @constructor
 */
function Api(urlParametrizedComponentFactories, bodyParametrizedComponentFactories) {
  this.name = "";
  this.endpoints = {};      // endpoint descriptions, keyed by endpoint id
  this.globalRules = {};    // compiled GLOBAL.* body rules, keyed by rule name
  this.globalBodyComponentFactories = bodyParametrizedComponentFactories;
  this.urlPatternMatcher = new url_pattern_matcher.UrlPatternMatcher(urlParametrizedComponentFactories);
}
+
(function (cls) {
  /**
   * Compiles the given rules and registers them under a GLOBAL scope name.
   */
  cls.addGlobalAutocompleteRules = function (parentNode, rules) {
    this.globalRules[parentNode] = body_completer.compileBodyDescription(
      "GLOBAL." + parentNode, rules, this.globalBodyComponentFactories);
  };

  /**
   * Returns the compiled global components registered under term.
   * Throws when the term is unknown, unless throwOnMissing is explicitly falsy.
   */
  cls.getGlobalAutocompleteComponents = function (term, throwOnMissing) {
    var result = this.globalRules[term];
    // when throwOnMissing is omitted, missing rules are treated as an error
    var shouldThrow = throwOnMissing || _.isUndefined(throwOnMissing);
    if (_.isUndefined(result) && shouldThrow) {
      throw new Error("failed to resolve global components for ['" + term + "']");
    }
    return result;
  };

  /**
   * Registers an endpoint: fills in defaults, registers its url patterns and
   * compiles its url-parameter and body autocomplete descriptions.
   */
  cls.addEndpointDescription = function (endpoint, description) {
    // work on a copy so callers keep ownership of their description object
    var copiedDescription = _.extend({}, description || {});
    _.defaults(copiedDescription, {
      id: endpoint,
      patterns: [endpoint],
      methods: ['GET']
    });

    var self = this;
    _.each(copiedDescription.patterns, function (pattern) {
      self.urlPatternMatcher.addEndpoint(pattern, copiedDescription);
    });

    copiedDescription.paramsAutocomplete = new url_params.UrlParams(copiedDescription.url_params);
    copiedDescription.bodyAutocompleteRootComponents = body_completer.compileBodyDescription(
      copiedDescription.id, copiedDescription.data_autocomplete_rules, this.globalBodyComponentFactories);

    this.endpoints[endpoint] = copiedDescription;
  };

  cls.getEndpointDescriptionByEndpoint = function (endpoint) {
    return this.endpoints[endpoint];
  };

  cls.getTopLevelUrlCompleteComponents = function () {
    return this.urlPatternMatcher.getTopLevelComponents();
  };

  cls.getUnmatchedEndpointComponents = function () {
    return body_completer.globalsOnlyAutocompleteComponents();
  };

  // drops registered endpoints and global rules
  cls.clear = function () {
    this.endpoints = {};
    this.globalRules = {};
  };
}(Api.prototype));


module.exports = Api;
diff --git a/src/plugins/console/public/src/mappings.js b/src/plugins/console/public/src/mappings.js
new file mode 100644
index 0000000000000..82906c8f4fae8
--- /dev/null
+++ b/src/plugins/console/public/src/mappings.js
@@ -0,0 +1,291 @@
+let $ = require('jquery');
+let _ = require('lodash');
+let utils = require('./utils');
+let es = require('./es');
+let settings = require('./settings');
+
+
// index name -> { type name -> [ field entries ] }, populated by loadMappings()
var per_index_types = {};
// alias name -> [ index names ], populated by loadAliases()
// NOTE(review): initialized as an array but reassigned to a plain object in
// loadAliases()/clear(); it is always used as a dictionary - confirm intent.
var per_alias_indexes = [];

// doubles as the module's export object and as an event emitter handle
var mappingObj = {};
+
function expandAliases(indicesOrAliases) {
  // takes a list of indices or aliases or a string which may be either and
  // returns the matching concrete indices - a list for multiple values,
  // a plain string for a single one.
  if (!indicesOrAliases) {
    return indicesOrAliases;
  }

  var asList = typeof indicesOrAliases === "string" ? [indicesOrAliases] : indicesOrAliases;

  // replace every alias with the indices it points at ($.map flattens the
  // returned arrays one level)
  var expanded = $.map(asList, function (indexOrAlias) {
    return per_alias_indexes[indexOrAlias] || [indexOrAlias];
  });

  var sorted = [].concat.apply([], expanded);
  sorted.sort();

  // drop adjacent duplicates: the list is sorted and $.map removes nulls
  var previous;
  var unique = $.map(sorted, function (value) {
    var keep = previous == value ? null : value;
    previous = value;
    return keep;
  });

  return unique.length > 1 ? unique : unique[0];
}
+
function getFields(indices, types) {
  // get fields for indices and types. Both can be a list, a string or null
  // (meaning all). Returns a deduped list of { name, type } entries.
  indices = expandAliases(indices);
  var collected = [];

  if (typeof indices == "string") {
    // single index
    var typeDict = per_index_types[indices];
    if (!typeDict) {
      return [];
    }

    if (typeof types == "string") {
      // single type - take its field list directly
      var fields = typeDict[types];
      collected = fields ? fields : [];
    }
    else {
      // filter what we need
      $.each(typeDict, function (type, fields) {
        if (!types || types.length == 0 || $.inArray(type, types) != -1) {
          collected.push(fields);
        }
      });

      collected = [].concat.apply([], collected);
    }
  }
  else {
    // multi index mode.
    $.each(per_index_types, function (index) {
      if (!indices || indices.length == 0 || $.inArray(index, indices) != -1) {
        collected.push(getFields(index, types));
      }
    });
    collected = [].concat.apply([], collected);
  }

  // dedupe by name + type
  return _.uniq(collected, function (f) {
    return f.name + ":" + f.type;
  });
}
+
function getTypes(indices) {
  // returns the deduped type names defined in the given indices
  // (a list, a string, or null meaning all indices)
  indices = expandAliases(indices);
  var types = [];

  if (typeof indices == "string") {
    var typeDict = per_index_types[indices];
    if (!typeDict) {
      return [];
    }

    // all types of the single index
    $.each(typeDict, function (type) {
      types.push(type);
    });
  }
  else {
    // multi index mode.
    $.each(per_index_types, function (index) {
      if (!indices || $.inArray(index, indices) != -1) {
        types.push(getTypes(index));
      }
    });
    types = [].concat.apply([], types);
  }

  return _.uniq(types);
}
+
+
// returns the known index names; aliases are included unless
// include_aliases is explicitly falsy (defaults to true)
function getIndices(include_aliases) {
  var withAliases = typeof include_aliases === "undefined" ? true : include_aliases;
  var indices = [];
  $.each(per_index_types, function (index) {
    indices.push(index);
  });
  if (withAliases) {
    $.each(per_alias_indexes, function (alias) {
      indices.push(alias);
    });
  }
  return indices;
}
+
// Extracts the flat list of { name, type } field entries described by a single
// field mapping, recursing into object properties and multi fields.
function getFieldNamesFromFieldMapping(field_name, field_mapping) {
  // disabled fields are not indexed - nothing to autocomplete
  if (field_mapping['enabled'] == false) {
    return [];
  }
  var nested_fields;

  // prefixes nested field names with this field's name when the mapping
  // uses 'full' path semantics (the default)
  function applyPathSettings(nested_field_names) {
    var path_type = field_mapping['path'] || "full";
    if (path_type == "full") {
      return $.map(nested_field_names, function (f) {
        f.name = field_name + "." + f.name;
        return f;
      });
    }
    return nested_field_names;
  }

  if (field_mapping["properties"]) {
    // derived object type
    nested_fields = getFieldNamesFromTypeMapping(field_mapping);
    return applyPathSettings(nested_fields);
  }

  var field_type = field_mapping['type'];

  if (field_type === 'multi_field') {
    // legacy multi_field type: expose each sub-field (no entry for the parent)
    nested_fields = $.map(field_mapping['fields'], function (field_mapping, field_name) {
      return getFieldNamesFromFieldMapping(field_name, field_mapping);
    });

    return applyPathSettings(nested_fields);
  }

  var ret = {name: field_name, type: field_type};

  // an explicit index_name overrides the mapping key as the field's name
  if (field_mapping["index_name"]) {
    ret.name = field_mapping["index_name"];
  }

  if (field_mapping["fields"]) {
    // modern multi fields: expose the parent field plus each sub-field
    nested_fields = $.map(field_mapping['fields'], function (field_mapping, field_name) {
      return getFieldNamesFromFieldMapping(field_name, field_mapping);
    });
    nested_fields = applyPathSettings(nested_fields);
    nested_fields.unshift(ret);
    return nested_fields;
  }

  return [ret];
}
+
// Collects the field entries of every property of a type mapping and
// dedupes them by name + type.
function getFieldNamesFromTypeMapping(type_mapping) {
  var fieldList = $.map(type_mapping['properties'] || {}, function (fieldMapping, fieldName) {
    return getFieldNamesFromFieldMapping(fieldName, fieldMapping);
  });

  return _.uniq(fieldList, function (f) {
    return f.name + ":" + f.type;
  });
}
+
// Rebuilds the per_index_types cache from a raw _mapping response.
function loadMappings(mappings) {
  per_index_types = {};
  $.each(mappings, function (index, indexMapping) {
    // 1.0.0 mapping format has changed, extract underlying mapping
    if (indexMapping.mappings && _.keys(indexMapping).length === 1) {
      indexMapping = indexMapping.mappings;
    }
    var normalizedIndexMappings = {};
    $.each(indexMapping, function (typeName, typeMapping) {
      normalizedIndexMappings[typeName] = getFieldNamesFromTypeMapping(typeMapping);
    });
    per_index_types[index] = normalizedIndexMappings;
  });
}
+
// Rebuilds the per_alias_indexes cache from a raw _aliases response.
function loadAliases(aliases) {
  per_alias_indexes = {};
  $.each(aliases || {}, function (index, indexAliases) {
    // verify we have an index defined. Useful when mapping loading is disabled
    per_index_types[index] = per_index_types[index] || {};

    $.each(indexAliases.aliases || {}, function (alias) {
      // an alias which is identical to its index means no index.
      if (alias === index) {
        return;
      }
      var indicesForAlias = per_alias_indexes[alias];
      if (!indicesForAlias) {
        indicesForAlias = [];
        per_alias_indexes[alias] = indicesForAlias;
      }
      indicesForAlias.push(index);
    });
  });

  // _all expands to every concrete index
  per_alias_indexes['_all'] = getIndices(false);
}
+
// Drops all cached mapping and alias information.
function clear() {
  per_alias_indexes = {};
  per_index_types = {};
}
+
// Fetches _mapping and _aliases from the cluster (each only when enabled in
// the autocomplete settings), refreshes the local caches and fires an
// 'update' event on the module object.
function retrieveAutocompleteInfoFromServer() {
  var autocompleteSettings = settings.getAutocomplete(),
    mappingPromise, aliasesPromise;
  if (autocompleteSettings.fields) {
    mappingPromise = es.send("GET", "_mapping", null, null, true);
  }
  else {
    // field autocomplete disabled - substitute an already-resolved promise
    mappingPromise = new $.Deferred();
    mappingPromise.resolve();
  }
  if (autocompleteSettings.indices) {
    aliasesPromise = es.send("GET", "_aliases", null, null, true);
  }
  else {
    aliasesPromise = new $.Deferred();
    aliasesPromise.resolve();
  }

  // $.when passes each request's result as an arguments array; index [0]
  // holds the raw response body
  $.when(mappingPromise, aliasesPromise)
    .done(function (mappings, aliases) {
      if (!mappings) {
        mappings = {}
      }
      else if (mappings[0].length < 10 * 1024 * 1024) {
        mappings = JSON.parse(mappings[0]);
      }
      else {
        // huge mappings would freeze the UI while parsing - skip them
        console.warn("mapping size is larger than 10MB (" + mappings[0].length / 1024 / 1024 + " MB). ignoring..");
        mappings = {};
      }
      loadMappings(mappings);
      if (aliases) {
        loadAliases(JSON.parse(aliases[0]));
      } else {
        aliases = [{}];
        loadAliases({});
      }
      // Trigger an update event with the mappings and aliases
      // NOTE(review): mappings was reassigned to the parsed object above, so
      // mappings[0] here is likely undefined rather than the raw response -
      // confirm what listeners expect.
      $(mappingObj).trigger('update', [mappings[0], aliases[0]]);
    }
    )
  ;
}
+
// Polls the cluster for updated mappings/aliases once a minute.
function autocomplete_retriever() {
  retrieveAutocompleteInfoFromServer();
  setTimeout(autocomplete_retriever, 60000);
}
+
// Public interface; mappingObj also serves as the event emitter handle that
// retrieveAutocompleteInfoFromServer triggers 'update' on.
module.exports = _.assign(mappingObj, {
  getFields: getFields,
  getIndices: getIndices,
  getTypes: getTypes,
  loadMappings: loadMappings,
  loadAliases: loadAliases,
  expandAliases: expandAliases,
  clear: clear,
  retrieveAutocompleteInfoFromServer: retrieveAutocompleteInfoFromServer
});
diff --git a/src/plugins/console/public/src/output.js b/src/plugins/console/public/src/output.js
new file mode 100644
index 0000000000000..0df74267104ad
--- /dev/null
+++ b/src/plugins/console/public/src/output.js
@@ -0,0 +1,66 @@
+let ace = require('ace');
+let $ = require('jquery');
+let settings = require('./settings');
+let OutputMode = require('./sense_editor/mode/output');
+const smartResize = require('./smart_resize');
+
// DOM element hosting the output pane and the ace editor rendered into it
var $el = $("#output");
var output = ace.require('ace/ace').edit($el[0]);

var outputMode = new OutputMode.Mode();

// resize helper (see smart_resize)
output.resize = smartResize(output);
/**
 * Replaces the whole output content.
 *
 * @param val the new text
 * @param mode optional ace mode; defaults to the output mode when val is
 *        non-empty, plain text otherwise. May be omitted (update(val, cb)).
 * @param cb optional callback invoked asynchronously after the update
 */
output.update = function (val, mode, cb) {
  if (typeof mode === 'function') {
    // called as update(val, cb)
    cb = mode;
    mode = void 0;
  }

  var session = output.getSession();
  var nextMode = val ? (mode || outputMode) : 'ace/mode/text';
  session.setMode(nextMode);
  session.setValue(val);
  if (typeof cb === 'function') {
    setTimeout(cb);
  }
};
+
/**
 * Appends val below the existing output.
 *
 * @param val the text to append
 * @param fold_previous whether to fold the current last line before appending
 *        (defaults to true); may be omitted when passing cb directly
 * @param cb optional callback invoked asynchronously after the insert
 */
output.append = function (val, fold_previous, cb) {
  if (typeof fold_previous === 'function') {
    // called as append(val, cb)
    cb = fold_previous;
    fold_previous = true;
  }
  // BUG FIX: the original used _.isUndefined() but this module never loads
  // lodash, so calling append(val) threw a ReferenceError on `_`.
  if (typeof fold_previous === 'undefined') {
    fold_previous = true;
  }
  var session = output.getSession();
  var lastLine = session.getLength();
  if (fold_previous) {
    output.moveCursorTo(Math.max(0, lastLine - 1), 0);
    session.toggleFold(false);
  }
  session.insert({row: lastLine, column: 0}, "\n" + val);
  output.moveCursorTo(lastLine + 1, 0);
  if (typeof cb === 'function') {
    setTimeout(cb);
  }
};
+
output.$el = $el;

// one-time session setup
(function (session) {
  session.setMode("ace/mode/text");
  session.setFoldStyle('markbeginend');
  session.setTabSize(2);
  session.setUseWrapMode(true);
}(output.getSession()));

output.setShowPrintMargin(false);
// the output pane only displays responses - never editable
output.setReadOnly(true);

if (settings) {
  settings.applyCurrentSettings(output);
}

module.exports = output;
diff --git a/src/plugins/console/public/src/sense_editor/editor.js b/src/plugins/console/public/src/sense_editor/editor.js
new file mode 100644
index 0000000000000..184427fe335dc
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/editor.js
@@ -0,0 +1,638 @@
+let _ = require('lodash');
+let ace = require('ace');
+let $ = require('jquery');
+let curl = require('../curl');
+let RowParser = require('./row_parser');
+let InputMode = require('./mode/input');
+let utils = require('../utils');
+let es = require('../es');
+import chrome from 'ui/chrome';
+
+const smartResize = require('../smart_resize');
+
// true when x has a parseable base-10 integer prefix (e.g. "12", "3.9", 7)
function isInt(x) {
  return !Number.isNaN(parseInt(x, 10));
}
+
// Wraps an ace editor instance in a per-instance subclass so SenseEditor can
// override members without touching the shared ace prototype.
function createInstance($el) {
  var aceEditor = ace.edit($el[0]);

  // we must create a custom class for each instance, so that the prototype
  // can be the unique aceEditor it extends
  function CustomSenseEditor() {
  }
  CustomSenseEditor.prototype = {};

  // expose a non-function member of the ace editor via get/set accessors
  function bindProp(key) {
    Object.defineProperty(CustomSenseEditor.prototype, key, {
      get: function () {
        return aceEditor[key];
      },
      set: function (val) {
        aceEditor[key] = val;
      }
    });
  }

  // mirror every accessible member, on the prototype and beyond
  for (var key in aceEditor) {
    if (typeof aceEditor[key] === 'function') {
      CustomSenseEditor.prototype[key] = _.bindKey(aceEditor, key);
    } else {
      bindProp(key);
    }
  }

  var editor = new CustomSenseEditor();
  editor.__ace = aceEditor;
  return editor;
}
+
// Builds the console's input editor: an ace editor wrapped with request-aware
// navigation, parsing and highlighting helpers.
function SenseEditor($el) {
  var editor = createInstance($el);
  // range of the request currently under the cursor (with its marker id)
  var CURRENT_REQ_RANGE = null;

  editor.$el = $el;
  // place holder for an action bar, needs to be set externally.
  editor.$actions = null;

  // mixin the RowParser
  editor.parser = new RowParser(editor);
  editor.resize = smartResize(editor);

  // dirty check for tokenizer state, uses a lot less cycles
  // than listening for tokenizerUpdate
  // Wraps func so that invocations are deferred until the background
  // tokenizer goes idle (polled every checkInterval ms).
  var onceDoneTokenizing = function (func, cancelAlreadyScheduledCalls) {
    var session = editor.getSession();
    var timer = false;
    var checkInterval = 25;

    return function () {
      var self = this;
      var args = [].slice.call(arguments, 0);

      if (cancelAlreadyScheduledCalls) {
        timer = clearTimeout(timer);
      }

      setTimeout(function check() {
        if (session.bgTokenizer.running) {
          // still tokenizing - try again shortly
          timer = setTimeout(check, checkInterval);
        }
        else {
          func.apply(self, args);
        }
      });
    };
  };

  editor.setShowPrintMargin(false);
  // one-time session setup
  (function (session) {
    session.setMode(new InputMode.Mode());
    session.setFoldStyle('markbeginend');
    session.setTabSize(2);
    session.setUseWrapMode(true);
  })(editor.getSession());
+
+ editor.prevRequestStart = function (rowOrPos) {
+ rowOrPos = _.isUndefined(rowOrPos) || rowOrPos == null ? editor.getCursorPosition() : rowOrPos;
+
+ var curRow = _.isObject(rowOrPos) ? rowOrPos.row : rowOrPos;
+ while (curRow > 0 && !editor.parser.isStartRequestRow(curRow, editor)) curRow--;
+
+ return {
+ row: curRow,
+ column: 0
+ };
+ };
+
+ editor.nextRequestStart = function (rowOrPos) {
+ rowOrPos = _.isUndefined(rowOrPos) || rowOrPos == null ? editor.getCursorPosition() : rowOrPos;
+ var session = editor.getSession();
+ var curRow = _.isObject(rowOrPos) ? rowOrPos.row : rowOrPos;
+ var maxLines = session.getLength();
+ for (; curRow < maxLines - 1; curRow++) {
+ if (editor.parser.isStartRequestRow(curRow, editor)) {
+ break;
+ }
+ }
+ return {
+ row: curRow,
+ column: 0
+ };
+ };
+
  // Re-formats the body of the request under the cursor; when the body is
  // already formatted, toggles between indented and compact form.
  editor.autoIndent = onceDoneTokenizing(function () {
    editor.getRequestRange(function (req_range) {
      if (!req_range) {
        return;
      }
      editor.getRequest(function (parsed_req) {
        if (parsed_req.data && parsed_req.data.length > 0) {
          var indent = parsed_req.data.length == 1; // unindent multi docs by default
          var formatted_data = utils.reformatData(parsed_req.data, indent);
          if (!formatted_data.changed) {
            // toggle.
            formatted_data = utils.reformatData(parsed_req.data, !indent);
          }
          parsed_req.data = formatted_data.data;

          editor.replaceRequestRange(parsed_req, req_range);
        }
      });
    });
  }, true);
+
+ editor.update = function (data, callback) {
+ callback = typeof callback === 'function' ? callback : null;
+ var session = editor.getSession();
+
+ session.setValue(data);
+ if (callback) {
+ // force update of tokens, but not on this thread to allow for ace rendering.
+ setTimeout(function () {
+ var i;
+ for (i = 0; i < session.getLength(); i++) {
+ session.getTokens(i);
+ }
+ callback();
+ });
+ }
+
+ };
+
+ editor.replaceRequestRange = function (newRequest, requestRange) {
+ var text = utils.textFromRequest(newRequest);
+ if (requestRange) {
+ var pos = editor.getCursorPosition();
+ editor.getSession().replace(requestRange, text);
+ var max_row = Math.max(requestRange.start.row + text.split('\n').length - 1, 0);
+ pos.row = Math.min(pos.row, max_row);
+ editor.moveCursorToPosition(pos);
+ // ACE UPGRADE - check if needed - at the moment the above may trigger a selection.
+ editor.clearSelection();
+ }
+ else {
+ // just insert where we are
+ editor.insert(text);
+ }
+ };
+
+ editor.iterForCurrentLoc = function () {
+ var pos = editor.getCursorPosition();
+ return editor.iterForPosition(pos.row, pos.column, editor);
+ };
+
+ editor.iterForPosition = function (row, column) {
+ return new (ace.require("ace/token_iterator").TokenIterator)(editor.getSession(), row, column);
+ };
+
+ editor.getRequestRange = onceDoneTokenizing(function (row, cb) {
+ if (_.isUndefined(cb)) {
+ cb = row;
+ row = null;
+ }
+ if (typeof cb !== 'function') {
+ return;
+ }
+
+ if (editor.parser.isInBetweenRequestsRow(row)) {
+ cb(null);
+ return
+ }
+
+ var reqStart = editor.prevRequestStart(row, editor);
+ var reqEnd = editor.nextRequestEnd(reqStart, editor);
+ cb(new (ace.require("ace/range").Range)(
+ reqStart.row, reqStart.column,
+ reqEnd.row, reqEnd.column
+ ));
+ });
+
  // Expands range (the current selection when omitted) to fully cover the
  // requests it touches and passes the resulting ace Range to cb; cb(null)
  // when no request overlaps the range.
  editor.getEngulfingRequestsRange = onceDoneTokenizing(function (range, cb) {
    if (_.isUndefined(cb)) {
      // called as getEngulfingRequestsRange(cb)
      cb = range;
      range = null;
    }

    range = range || editor.getSelectionRange();

    var session = editor.getSession();
    var startRow = range.start.row;
    var endRow = range.end.row;
    var maxLine = Math.max(0, session.getLength() - 1);

    // move start row to the previous request start if in body, o.w. forward
    if (editor.parser.isInBetweenRequestsRow(startRow)) {
      // intentionally left empty - startRow is kept as is
      //for (; startRow <= endRow; startRow++) {
      //  if (editor.parser.isStartRequestRow(startRow)) {
      //    break;
      //  }
      //}
    }
    else {
      for (; startRow >= 0; startRow--) {
        if (editor.parser.isStartRequestRow(startRow)) {
          break;
        }
      }
    }

    if (startRow < 0 || startRow > endRow) {
      cb(null);
      return;
    }
    // move end row to the previous request end if between requests, o.w. walk forward
    if (editor.parser.isInBetweenRequestsRow(endRow)) {
      for (; endRow >= startRow; endRow--) {
        if (editor.parser.isEndRequestRow(endRow)) {
          break;
        }
      }
    }
    else {

      for (; endRow <= maxLine; endRow++) {
        if (editor.parser.isEndRequestRow(endRow)) {
          break;
        }
      }

    }

    if (endRow < startRow || endRow > maxLine) {
      cb(null);
      return;
    }

    // end column excludes trailing whitespace of the last line
    var endColumn = (session.getLine(endRow) || "").replace(/\s+$/, "").length;
    cb(new (ace.require("ace/range").Range)(startRow, 0, endRow, endColumn));
  });
+
+
  // Parses the request contained in range into { method, url, data[], range }
  // and passes it to cb. data holds one trimmed string per body document.
  editor.getRequestInRange = onceDoneTokenizing(function (range, cb) {
    var request = {
      method: "",
      data: [],
      url: null,
      range: range
    };

    var pos = range.start;
    var tokenIter = editor.iterForPosition(pos.row, pos.column, editor);
    var t = tokenIter.getCurrentToken();
    if (editor.parser.isEmptyToken(t)) {
      // if the row starts with some spaces, skip them.
      t = editor.parser.nextNonEmptyToken(tokenIter);
    }
    request.method = t.value;
    t = editor.parser.nextNonEmptyToken(tokenIter);
    // NOTE(review): this early return skips cb entirely (the callback is
    // never called with null) - confirm callers tolerate that.
    if (!t || t.type == "method") {
      return null;
    }
    // concatenate all consecutive url* tokens into the request url
    request.url = "";
    while (t && t.type && t.type.indexOf("url") == 0) {
      request.url += t.value;
      t = tokenIter.stepForward();
    }
    if (editor.parser.isEmptyToken(t)) {
      // if the url row ends with some spaces, skip them.
      t = editor.parser.nextNonEmptyToken(tokenIter);
    }

    var bodyStartRow = (t ? 0 : 1) + tokenIter.getCurrentTokenRow(); // artificially increase end of docs.
    var dataEndPos;
    // collect each body document until the end of the request range
    while (bodyStartRow < range.end.row || (
      bodyStartRow == range.end.row && 0 < range.end.column
    )) {
      dataEndPos = editor.nextDataDocEnd({
        row: bodyStartRow,
        column: 0
      });
      var bodyRange = new (ace.require("ace/range").Range)(
        bodyStartRow, 0,
        dataEndPos.row, dataEndPos.column
      );
      var data = editor.getSession().getTextRange(bodyRange);
      request.data.push(data.trim());
      bodyStartRow = dataEndPos.row + 1;
    }

    cb(request);
  });
+
  // Collects all requests overlapping range (selection when omitted) and
  // passes them to cb as a list of parsed request objects. When
  // includeNonRequestBlocks is true, text between requests is included as
  // plain strings.
  editor.getRequestsInRange = function (range, includeNonRequestBlocks, cb) {
    // flexible call signatures: (cb), (range, cb), (range, include, cb)
    if (_.isUndefined(includeNonRequestBlocks)) {
      includeNonRequestBlocks = false;
      cb = range;
      range = null;
    } else if (_.isUndefined(cb)) {
      cb = includeNonRequestBlocks;
      includeNonRequestBlocks = false;
    }

    // recursively walks requestsRange top-down, parsing one request per step
    function explicitRangeToRequests(requestsRange, tempCb) {
      if (!requestsRange) {
        tempCb([]);
        return;
      }

      var startRow = requestsRange.start.row;
      var endRow = requestsRange.end.row;

      // move to the next request start (during the second iterations this may not be exactly on a request
      var currentRow = startRow;
      for (; currentRow <= endRow; currentRow++) {
        if (editor.parser.isStartRequestRow(currentRow)) {
          break;
        }
      }

      var nonRequestPrefixBlock = null;
      if (includeNonRequestBlocks && currentRow != startRow) {
        nonRequestPrefixBlock = editor.getSession().getLines(startRow, currentRow - 1).join("\n");
      }

      if (currentRow > endRow) {
        // no further requests in the range
        tempCb(nonRequestPrefixBlock ? [nonRequestPrefixBlock] : []);
        return;
      }

      editor.getRequest(currentRow, function (request) {
        // recurse on the remainder of the range, then prepend this request
        explicitRangeToRequests({
          start: {
            row: request.range.end.row + 1
          },
          end: {
            row: requestsRange.end.row
          }
        },
        function (rest_of_requests) {
          rest_of_requests.unshift(request);
          if (nonRequestPrefixBlock != null) {
            rest_of_requests.unshift(nonRequestPrefixBlock);
          }
          tempCb(rest_of_requests);
        }
        )
      })
    }

    editor.getEngulfingRequestsRange(range, function (requestRange) {
      explicitRangeToRequests(requestRange, cb);
    });
  };
+
  // Parses the request containing row (cursor row when omitted) and passes
  // the parsed request to cb; cb(null) when row is between requests.
  editor.getRequest = onceDoneTokenizing(function (row, cb) {
    if (_.isUndefined(cb)) {
      // called as getRequest(cb)
      cb = row;
      row = null;
    }
    if (typeof cb !== 'function') {
      return;
    }
    if (editor.parser.isInBetweenRequestsRow(row)) {
      cb(null);
      return;
    }
    editor.getRequestRange(row, function (range) {
      editor.getRequestInRange(range, cb);
    });
  });
+
+ editor.moveToPreviousRequestEdge = onceDoneTokenizing(function () {
+ var pos = editor.getCursorPosition();
+ for (pos.row--; pos.row > 0 && !editor.parser.isRequestEdge(pos.row); pos.row--) {
+ }
+ editor.moveCursorTo(pos.row, 0);
+ });
+
+ editor.moveToNextRequestEdge = onceDoneTokenizing(function (moveOnlyIfNotOnEdge) {
+ var pos = editor.getCursorPosition();
+ var maxRow = editor.getSession().getLength();
+ if (!moveOnlyIfNotOnEdge) {
+ pos.row++;
+ }
+ for (; pos.row < maxRow && !editor.parser.isRequestEdge(pos.row); pos.row++) {
+ }
+ editor.moveCursorTo(pos.row, 0);
+ });
+
+ editor.nextRequestEnd = function (pos) {
+ pos = pos || editor.getCursorPosition();
+ var session = editor.getSession();
+ var curRow = pos.row;
+ var maxLines = session.getLength();
+ for (; curRow < maxLines - 1; curRow++) {
+ var curRowMode = editor.parser.getRowParseMode(curRow, editor);
+ if ((curRowMode & editor.parser.MODE.REQUEST_END) > 0) {
+ break;
+ }
+ if (curRow != pos.row && (curRowMode & editor.parser.MODE.REQUEST_START) > 0) {
+ break;
+ }
+ }
+
+ var column = (session.getLine(curRow) || "").replace(/\s+$/, "").length;
+
+ return {
+ row: curRow,
+ column: column
+ };
+ };
+
+ editor.nextDataDocEnd = function (pos) {
+ pos = pos || editor.getCursorPosition();
+ var session = editor.getSession();
+ var curRow = pos.row;
+ var maxLines = session.getLength();
+ for (; curRow < maxLines - 1; curRow++) {
+ var curRowMode = editor.parser.getRowParseMode(curRow, editor);
+ if ((curRowMode & RowParser.REQUEST_END) > 0) {
+ break;
+ }
+ if ((curRowMode & editor.parser.MODE.MULTI_DOC_CUR_DOC_END) > 0) {
+ break;
+ }
+ if (curRow != pos.row && (curRowMode & editor.parser.MODE.REQUEST_START) > 0) {
+ break;
+ }
+ }
+
+ var column = (session.getLine(curRow) || "").length;
+
+ return {
+ row: curRow,
+ column: column
+ };
+ };
+
+ // overwrite the actual aceEditor's onPaste method
+ var origOnPaste = editor.__ace.onPaste;
+ editor.__ace.onPaste = function (text) {
+ if (text && curl.detectCURL(text)) {
+ editor.handleCURLPaste(text);
+ return;
+ }
+ origOnPaste.call(this, text);
+ };
+
+ editor.handleCURLPaste = function (text) {
+ var curlInput = curl.parseCURL(text);
+
+ editor.insert(curlInput);
+ };
+
  // Tracks the request under the cursor: maintains the highlight marker for
  // CURRENT_REQ_RANGE and repositions the floating action bar.
  editor.highlightCurrentRequestsAndUpdateActionBar = onceDoneTokenizing(function () {
    var session = editor.getSession();
    editor.getEngulfingRequestsRange(function (new_current_req_range) {
      if (new_current_req_range == null && CURRENT_REQ_RANGE == null) {
        return;
      }
      if (new_current_req_range != null && CURRENT_REQ_RANGE != null &&
        new_current_req_range.start.row == CURRENT_REQ_RANGE.start.row &&
        new_current_req_range.end.row == CURRENT_REQ_RANGE.end.row
      ) {
        // same request, now see if we are on the first line and update the action bar
        var cursorRow = editor.getCursorPosition().row;
        if (cursorRow == CURRENT_REQ_RANGE.start.row) {
          editor.updateActionsBar();
        }
        return; // nothing to do..
      }

      // the highlighted request changed - move the marker to the new range
      if (CURRENT_REQ_RANGE) {
        session.removeMarker(CURRENT_REQ_RANGE.marker_id);
      }

      CURRENT_REQ_RANGE = new_current_req_range;
      if (CURRENT_REQ_RANGE) {
        CURRENT_REQ_RANGE.marker_id = session.addMarker(CURRENT_REQ_RANGE, "ace_snippet-marker", "fullLine");
      }
      editor.updateActionsBar();
    });
  }, true);
+
  // Renders the requests in range (selection when omitted) as curl commands,
  // one per request, and passes the joined string to cb. Non-request text
  // between requests is passed through verbatim.
  editor.getRequestsAsCURL = function (range, cb) {
    if (_.isUndefined(cb)) {
      // called as getRequestsAsCURL(cb)
      cb = range;
      range = null;
    }

    if (_.isUndefined(cb)) {
      cb = $.noop;
    }

    editor.getRequestsInRange(range, true, function (requests) {

      var result = _.map(requests, function requestToCurl(req) {

        if (typeof req === "string") {
          // no request block
          return req;
        }

        var
          es_path = req.url,
          es_method = req.method,
          es_data = req.data;

        const elasticsearchBaseUrl = chrome.getInjected('elasticsearchUrl');
        var url = es.constructESUrl(elasticsearchBaseUrl, es_path);

        var ret = 'curl -X' + es_method + ' "' + url + '"';
        if (es_data && es_data.length) {
          ret += " -d'\n";
          // since Sense doesn't allow single quote json string any single qoute is within a string.
          // NOTE(review): single quotes are replaced with \" so the shell's
          // -d'...' quoting stays intact - confirm this escaping is intended.
          ret += es_data.join("\n").replace(/'/g, '\\"');
          if (es_data.length > 1) {
            ret += "\n";
          } // end with a new line
          ret += "'";
        }
        return ret;
      });

      cb(result.join("\n"));
    });
  };
+
  // keep the request highlight and action bar in sync with edits and cursor moves
  editor.getSession().on('tokenizerUpdate', function (e) {
    editor.highlightCurrentRequestsAndUpdateActionBar();
  });

  editor.getSession().selection.on('changeCursor', function (e) {
    editor.highlightCurrentRequestsAndUpdateActionBar();
  });
+
  // Positions the floating action bar next to the highlighted request, or
  // hides it when no request is highlighted / the bar element is not set.
  editor.updateActionsBar = (function () {
    // shows the bar at the given top offset; hides it when top is null
    var set = function (top) {
      if (top == null) {
        editor.$actions.css('visibility', 'hidden');
      }
      else {
        editor.$actions.css({
          top: top,
          visibility: 'visible'
        });
      }
    };

    var hide = function () {
      set();
    };

    return function () {
      if (!editor.$actions) {
        return;
      }
      if (CURRENT_REQ_RANGE) {
        // elements are positioned relative to the editor's container
        // pageY is relative to page, so subtract the offset
        // from pageY to get the new top value
        var offsetFromPage = editor.$el.offset().top;
        var startRow = CURRENT_REQ_RANGE.start.row;
        var startColumn = CURRENT_REQ_RANGE.start.column;
        var session = editor.session;
        var firstLine = session.getLine(startRow);

        // a near-full-width first line would be overlapped by the bar -
        // shift to the neighbouring row instead
        if (firstLine.length > session.getWrapLimit() - 5) {
          // overlap first row
          if (startRow > 0) {
            startRow--;
          }
          else {
            startRow++;
          }
        }


        var topOfReq = editor.renderer.textToScreenCoordinates(startRow, startColumn).pageY - offsetFromPage;

        if (topOfReq >= 0) {
          return set(topOfReq);
        }

        // request top is scrolled out of view - pin the bar to the top while
        // any part of the request is still visible
        var bottomOfReq = editor.renderer.textToScreenCoordinates(
          CURRENT_REQ_RANGE.end.row,
          CURRENT_REQ_RANGE.end.column
        ).pageY - offsetFromPage;

        if (bottomOfReq >= 0) {
          return set(0);
        }
      }

      hide();
    }
  }());

  // reposition the bar while scrolling
  editor.getSession().on("changeScrollTop", editor.updateActionsBar);

  return editor;
}

module.exports = SenseEditor;
diff --git a/src/plugins/console/public/src/sense_editor/mode/input.js b/src/plugins/console/public/src/sense_editor/mode/input.js
new file mode 100644
index 0000000000000..9b197b96740c2
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/mode/input.js
@@ -0,0 +1,72 @@
+let ace = require('ace');
+let acequire = require('acequire');
+let mode_json = require('ace/mode-json');
+
// ace building blocks reused by the console input mode
var oop = acequire("ace/lib/oop");
var TextMode = acequire("ace/mode/text").Mode;
var MatchingBraceOutdent = acequire("ace/mode/matching_brace_outdent").MatchingBraceOutdent;
var CstyleBehaviour = acequire("ace/mode/behaviour/cstyle").CstyleBehaviour;
var CStyleFoldMode = acequire("ace/mode/folding/cstyle").FoldMode;
var WorkerClient = acequire("ace/worker/worker_client").WorkerClient;
var AceTokenizer = acequire("ace/tokenizer").Tokenizer;

var HighlightRules = require("./input_highlight_rules").InputHighlightRules;

// register the url of the worker bundle so ace can spawn it for this mode
acequire("ace/config").setModuleUrl("sense_editor/mode/worker", require("file!./worker.js"));
+
+
// Ace editor mode for console input: custom highlight rules plus C-style
// bracket behaviour, outdenting and folding.
var Mode = function () {
  this.$tokenizer = new AceTokenizer(new HighlightRules().getRules());
  this.$outdent = new MatchingBraceOutdent();
  this.$behaviour = new CstyleBehaviour();
  this.foldingRules = new CStyleFoldMode();
};
oop.inherits(Mode, TextMode);
+
(function () {
  this.getCompletions = function (editor, session, pos, prefix) {
    // autocomplete is done by the autocomplete module.
    return [];
  };

  // indent the next line one step further after an opening bracket,
  // unless we are inside a double-quoted string
  this.getNextLineIndent = function (state, line, tab) {
    var indent = this.$getIndent(line);
    var endsWithOpenBracket = /^.*[\{\(\[]\s*$/.test(line);
    if (state != "double_q_string" && endsWithOpenBracket) {
      indent += tab;
    }
    return indent;
  };

  this.checkOutdent = function (state, line, input) {
    return this.$outdent.checkOutdent(line, input);
  };

  this.autoOutdent = function (state, doc, row) {
    this.$outdent.autoOutdent(doc, row);
  };

  // wires up the background worker that validates the document and reports
  // annotations back to the session
  this.createWorker = function (session) {
    var worker = new WorkerClient(["ace", "sense_editor"], "sense_editor/mode/worker", "SenseWorker");
    worker.attachToDocument(session.getDocument());

    worker.on("error", function (e) {
      session.setAnnotations([e.data]);
    });

    worker.on("ok", function (anno) {
      session.setAnnotations(anno.data);
    });

    return worker;
  };
}).call(Mode.prototype);

module.exports.Mode = Mode;
diff --git a/src/plugins/console/public/src/sense_editor/mode/input_highlight_rules.js b/src/plugins/console/public/src/sense_editor/mode/input_highlight_rules.js
new file mode 100644
index 0000000000000..da256594bfb45
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/mode/input_highlight_rules.js
@@ -0,0 +1,155 @@
+let ace = require('ace');
+
+var oop = ace.require("ace/lib/oop");
+var TextHighlightRules = ace.require("ace/mode/text_highlight_rules").TextHighlightRules;
+
+var InputHighlightRules = function () {
+
+ function mergeTokens(/* ... */) {
+ return [].concat.apply([], arguments);
+ }
+
+ function addEOL(tokens, reg, nextIfEOL, normalNext) {
+ if (typeof reg == "object") {
+ reg = reg.source;
+ }
+ return [
+ {token: tokens.concat(["whitespace"]), regex: reg + "(\\s*)$", next: nextIfEOL},
+ {token: tokens, regex: reg, next: normalNext}
+ ];
+ }
+
+ // regexp must not have capturing parentheses. Use (?:) instead.
+ // regexps are ordered -> the first match is used
+ /*jshint -W015 */
+ this.$rules = {
+ "start": mergeTokens([
+ {token: "comment", regex: /^#.*$/},
+ {token: "paren.lparen", regex: "{", next: "json", push: true}
+ ],
+ addEOL(["method"], /([a-zA-Z]+)/, "start", "method_sep")
+ ,
+ [
+ {
+ token: "whitespace",
+ regex: "\\s+"
+ },
+ {
+ token: "text",
+ regex: ".+?"
+ }
+ ]),
+ "method_sep": mergeTokens(
+ addEOL(["whitespace", "url.protocol_host", "url.slash"], /(\s+)(https?:\/\/[^?\/,]+)(\/)/, "start", "url"),
+ addEOL(["whitespace", "url.protocol_host"], /(\s+)(https?:\/\/[^?\/,]+)/, "start", "url"),
+ addEOL(["whitespace", "url.slash"], /(\s+)(\/)/, "start", "url"),
+ addEOL(["whitespace"], /(\s+)/, "start", "url")
+ ),
+ "url": mergeTokens(
+ addEOL(["url.part"], /([^?\/,\s]+)/, "start"),
+ addEOL(["url.comma"], /(,)/, "start"),
+ addEOL(["url.slash"], /(\/)/, "start"),
+ addEOL(["url.questionmark"], /(\?)/, "start", "urlParams")
+ ),
+ "urlParams": mergeTokens(
+ addEOL(["url.param", "url.equal", "url.value"], /([^&=]+)(=)([^&]*)/, "start"),
+ addEOL(["url.param"], /([^&=]+)/, "start"),
+ addEOL(["url.amp"], /(&)/, "start")
+ ),
+
+
+ "json": [
+ {
+ token: "variable", // single line
+ regex: '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]\\s*(?=:)'
+ },
+ {
+ token: "string", // single line
+ regex: '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]'
+ },
+ {
+ token: "constant.numeric", // hex
+ regex: "0[xX][0-9a-fA-F]+\\b"
+ },
+ {
+ token: "constant.numeric", // float
+ regex: "[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"
+ },
+ {
+ token: "constant.language.boolean",
+ regex: "(?:true|false)\\b"
+ },
+ {
+ token: "invalid.illegal", // single quoted strings are not allowed
+ regex: "['](?:(?:\\\\.)|(?:[^'\\\\]))*?[']"
+ },
+ {
+ token: "invalid.illegal", // comments are not allowed
+ regex: "\\/\\/.*$"
+ },
+ {
+ token: "paren.lparen",
+ merge: false,
+ regex: "{",
+ next: "json",
+ push: true
+ },
+ {
+ token: "paren.lparen",
+ merge: false,
+ regex: "[[(]"
+ },
+ {
+ token: "paren.rparen",
+ merge: false,
+ regex: "[\\])]"
+ },
+ {
+ token: "paren.rparen",
+ regex: "}",
+ merge: false,
+ next: "pop"
+ },
+ {
+ token: "punctuation.comma",
+ regex: ","
+ },
+ {
+ token: "punctuation.colon",
+ regex: ":"
+ },
+ {
+ token: "whitespace",
+ regex: "\\s+"
+ },
+ {
+ token: "text",
+ regex: ".+?"
+ }
+ ],
+ "double_q_string": [
+ {
+ token: "string",
+ regex: '[^"]+'
+ },
+ {
+ token: "punctuation.end_quote",
+ regex: '"',
+ next: "json"
+ },
+ {
+ token: "string",
+ regex: "",
+ next: "json"
+ }
+ ]
+ };
+
+ if (this.constructor === InputHighlightRules) {
+ this.normalizeRules();
+ }
+};
+
+oop.inherits(InputHighlightRules, TextHighlightRules);
+
+module.exports.InputHighlightRules = InputHighlightRules;
diff --git a/src/plugins/console/public/src/sense_editor/mode/output.js b/src/plugins/console/public/src/sense_editor/mode/output.js
new file mode 100644
index 0000000000000..d4edebc17745d
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/mode/output.js
@@ -0,0 +1,32 @@
+let ace = require('ace');
+let acequire = require('acequire');
+let mode_json = require('ace/mode-json');
+let output_highlighting_rules = require('./output_highlight_rules');
+
+
+var oop = ace.require("ace/lib/oop");
+var JSONMode = ace.require("ace/mode/json").Mode;
+var HighlightRules = require("./output_highlight_rules").OutputJsonHighlightRules;
+var MatchingBraceOutdent = ace.require("ace/mode/matching_brace_outdent").MatchingBraceOutdent;
+var CstyleBehaviour = ace.require("ace/mode/behaviour/cstyle").CstyleBehaviour;
+var CStyleFoldMode = ace.require("ace/mode/folding/cstyle").FoldMode;
+var WorkerClient = ace.require("ace/worker/worker_client").WorkerClient;
+var AceTokenizer = ace.require("ace/tokenizer").Tokenizer;
+
+var Mode = function () {
+ this.$tokenizer = new AceTokenizer(new HighlightRules().getRules());
+ this.$outdent = new MatchingBraceOutdent();
+ this.$behaviour = new CstyleBehaviour();
+ this.foldingRules = new CStyleFoldMode();
+};
+oop.inherits(Mode, JSONMode);
+
+(function () {
+ this.createWorker = function (session) {
+ return null;
+ };
+
+ this.$id = "sense/mode/input";
+}).call(Mode.prototype);
+
+module.exports.Mode = Mode;
diff --git a/src/plugins/console/public/src/sense_editor/mode/output_highlight_rules.js b/src/plugins/console/public/src/sense_editor/mode/output_highlight_rules.js
new file mode 100644
index 0000000000000..97b76cf71aaf9
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/mode/output_highlight_rules.js
@@ -0,0 +1,24 @@
+let ace = require('ace');
+let ace_mode_json = require('ace/mode-json');
+
+var oop = ace.require("ace/lib/oop");
+var JsonHighlightRules = ace.require("ace/mode/json_highlight_rules").JsonHighlightRules;
+
+var OutputJsonHighlightRules = function () {
+
+ // regexp must not have capturing parentheses. Use (?:) instead.
+ // regexps are ordered -> the first match is used
+ this.$rules = new JsonHighlightRules().getRules();
+
+ this.$rules.start.unshift(
+ {
+ "token": "comment",
+ "regex": "#.*$"
+ }
+ );
+
+};
+
+oop.inherits(OutputJsonHighlightRules, JsonHighlightRules);
+
+module.exports.OutputJsonHighlightRules = OutputJsonHighlightRules;
diff --git a/src/plugins/console/public/src/sense_editor/mode/worker.js b/src/plugins/console/public/src/sense_editor/mode/worker.js
new file mode 100644
index 0000000000000..4371f7ff14b8d
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/mode/worker.js
@@ -0,0 +1,1793 @@
+(function (window) {
+ "use strict";
+
+ if (typeof window.window !== "undefined" && window.document) {
+ return;
+ }
+
+ window.console = function () {
+ var msgs = Array.prototype.slice.call(arguments, 0);
+ window.postMessage({type: "log", data: msgs});
+ };
+ window.console.error =
+ window.console.warn =
+ window.console.log =
+ window.console.trace = window.console;
+
+ window.window = window;
+ window.ace = window;
+
+ window.onerror = function (message, file, line, col, err) {
+ console.error("Worker " + err.stack);
+ };
+
+ window.normalizeModule = function (parentId, moduleName) {
+ if (moduleName.indexOf("!") !== -1) {
+ var chunks = moduleName.split("!");
+ return window.normalizeModule(parentId, chunks[0]) + "!" + window.normalizeModule(parentId, chunks[1]);
+ }
+ if (moduleName.charAt(0) == ".") {
+ var base = parentId.split("/").slice(0, -1).join("/");
+ moduleName = (base ? base + "/" : "") + moduleName;
+
+ var previous;
+ while (moduleName.indexOf(".") !== -1 && previous != moduleName) {
+ previous = moduleName;
+ moduleName = moduleName.replace(/^\.\//, "").replace(/\/\.\//, "/").replace(/[^\/]+\/\.\.\//, "");
+ }
+ }
+
+ return moduleName;
+ };
+
+ window.require = function (parentId, id) {
+ if (!id) {
+ id = parentId
+ parentId = null;
+ }
+ if (!id.charAt)
+ throw new Error("worker.js require() accepts only (parentId, id) as arguments");
+
+ id = window.normalizeModule(parentId, id);
+
+ var module = window.require.modules[id];
+ if (module) {
+ if (!module.initialized) {
+ module.initialized = true;
+ module.exports = module.factory().exports;
+ }
+ return module.exports;
+ }
+
+ var chunks = id.split("/");
+ if (!window.require.tlns)
+ return console.log("unable to load " + id);
+ chunks[0] = window.require.tlns[chunks[0]] || chunks[0];
+ var path = chunks.join("/") + ".js";
+
+ window.require.id = id;
+ window.importScripts(path);
+ return window.require(parentId, id);
+ };
+ window.require.modules = {};
+ window.require.tlns = {};
+
+ window.define = function (id, deps, factory) {
+ if (arguments.length == 2) {
+ factory = deps;
+ if (typeof id != "string") {
+ deps = id;
+ id = window.require.id;
+ }
+ } else if (arguments.length == 1) {
+ factory = id;
+ deps = []
+ id = window.require.id;
+ }
+
+ if (!deps.length)
+ deps = ['require', 'exports', 'module']
+
+ if (id.indexOf("text!") === 0)
+ return;
+
+ var req = function (childId) {
+ return window.require(id, childId);
+ };
+
+ window.require.modules[id] = {
+ exports: {},
+ factory: function () {
+ var module = this;
+ var returnExports = factory.apply(this, deps.map(function (dep) {
+ switch (dep) {
+ case 'require':
+ return req
+ case 'exports':
+ return module.exports
+ case 'module':
+ return module
+ default:
+ return req(dep)
+ }
+ }));
+ if (returnExports)
+ module.exports = returnExports;
+ return module;
+ }
+ };
+ };
+ window.define.amd = {}
+
+ window.initBaseUrls = function initBaseUrls(topLevelNamespaces) {
+ require.tlns = topLevelNamespaces;
+ }
+
+ window.initSender = function initSender() {
+
+ var EventEmitter = window.require("ace/lib/event_emitter").EventEmitter;
+ var oop = window.require("ace/lib/oop");
+
+ var Sender = function () {
+ };
+
+ (function () {
+
+ oop.implement(this, EventEmitter);
+
+ this.callback = function (data, callbackId) {
+ window.postMessage({
+ type: "call",
+ id: callbackId,
+ data: data
+ });
+ };
+
+ this.emit = function (name, data) {
+ window.postMessage({
+ type: "event",
+ name: name,
+ data: data
+ });
+ };
+
+ }).call(Sender.prototype);
+
+ return new Sender();
+ }
+
+ window.main = null;
+ window.sender = null;
+
+ window.onmessage = function (e) {
+ var msg = e.data;
+ if (msg.command) {
+ if (window.main[msg.command])
+ window.main[msg.command].apply(window.main, msg.args);
+ else
+ throw new Error("Unknown command:" + msg.command);
+ }
+ else if (msg.init) {
+ window.initBaseUrls(msg.tlns);
+ require("ace/lib/es5-shim");
+ window.sender = window.initSender();
+ var clazz = require(msg.module)[msg.classname];
+ /*jshint -W055 */
+ window.main = new clazz(window.sender);
+ }
+ else if (msg.event && window.sender) {
+ window.sender._emit(msg.event, msg.data);
+ }
+ };
+})(this);// https://github.com/kriskowal/es5-shim
+
+define('ace/lib/oop', ['require', 'exports', 'module' ], function (require, exports, module) {
+ "use strict";
+
+ exports.inherits = function (ctor, superCtor) {
+ ctor.super_ = superCtor;
+ ctor.prototype = Object.create(superCtor.prototype, {
+ constructor: {
+ value: ctor,
+ enumerable: false,
+ writable: true,
+ configurable: true
+ }
+ });
+ };
+
+ exports.mixin = function (obj, mixin) {
+ for (var key in mixin) {
+ obj[key] = mixin[key];
+ }
+ return obj;
+ };
+
+ exports.implement = function (proto, mixin) {
+ exports.mixin(proto, mixin);
+ };
+
+});
+define('ace/worker/mirror', ['require', 'exports', 'module' , 'ace/document', 'ace/lib/lang'], function (require, exports, module) {
+ "use strict";
+
+ var Document = require("../document").Document;
+ var lang = require("../lib/lang");
+
+ var Mirror = exports.Mirror = function (sender) {
+ this.sender = sender;
+ var doc = this.doc = new Document("");
+
+ var deferredUpdate = this.deferredUpdate = lang.delayedCall(this.onUpdate.bind(this));
+
+ var _self = this;
+ sender.on("change", function (e) {
+ doc.applyDeltas(e.data);
+ if (_self.$timeout)
+ return deferredUpdate.schedule(_self.$timeout);
+ _self.onUpdate();
+ });
+ };
+
+ (function () {
+
+ this.$timeout = 500;
+
+ this.setTimeout = function (timeout) {
+ this.$timeout = timeout;
+ };
+
+ this.setValue = function (value) {
+ this.doc.setValue(value);
+ this.deferredUpdate.schedule(this.$timeout);
+ };
+
+ this.getValue = function (callbackId) {
+ this.sender.callback(this.doc.getValue(), callbackId);
+ };
+
+ this.onUpdate = function () {
+ };
+
+ this.isPending = function () {
+ return this.deferredUpdate.isPending();
+ };
+
+ }).call(Mirror.prototype);
+
+});
+
+define('ace/document', ['require', 'exports', 'module' , 'ace/lib/oop', 'ace/lib/event_emitter',
+ 'ace/range', 'ace/anchor'], function (require, exports, module) {
+ "use strict";
+
+ var oop = require("./lib/oop");
+ var EventEmitter = require("./lib/event_emitter").EventEmitter;
+ var Range = require("./range").Range;
+ var Anchor = require("./anchor").Anchor;
+
+ var Document = function (text) {
+ this.$lines = [];
+ if (text.length == 0) {
+ this.$lines = [""];
+ } else if (Array.isArray(text)) {
+ this._insertLines(0, text);
+ } else {
+ this.insert({row: 0, column: 0}, text);
+ }
+ };
+
+ (function () {
+
+ oop.implement(this, EventEmitter);
+ this.setValue = function (text) {
+ var len = this.getLength();
+ this.remove(new Range(0, 0, len, this.getLine(len - 1).length));
+ this.insert({row: 0, column: 0}, text);
+ };
+ this.getValue = function () {
+ return this.getAllLines().join(this.getNewLineCharacter());
+ };
+ this.createAnchor = function (row, column) {
+ return new Anchor(this, row, column);
+ };
+ if ("aaa".split(/a/).length == 0)
+ this.$split = function (text) {
+ return text.replace(/\r\n|\r/g, "\n").split("\n");
+ }
+ else
+ this.$split = function (text) {
+ return text.split(/\r\n|\r|\n/);
+ };
+
+
+ this.$detectNewLine = function (text) {
+ var match = text.match(/^.*?(\r\n|\r|\n)/m);
+ this.$autoNewLine = match ? match[1] : "\n";
+ };
+ this.getNewLineCharacter = function () {
+ switch (this.$newLineMode) {
+ case "windows":
+ return "\r\n";
+ case "unix":
+ return "\n";
+ default:
+ return this.$autoNewLine;
+ }
+ };
+
+ this.$autoNewLine = "\n";
+ this.$newLineMode = "auto";
+ this.setNewLineMode = function (newLineMode) {
+ if (this.$newLineMode === newLineMode)
+ return;
+
+ this.$newLineMode = newLineMode;
+ };
+ this.getNewLineMode = function () {
+ return this.$newLineMode;
+ };
+ this.isNewLine = function (text) {
+ return (text == "\r\n" || text == "\r" || text == "\n");
+ };
+ this.getLine = function (row) {
+ return this.$lines[row] || "";
+ };
+ this.getLines = function (firstRow, lastRow) {
+ return this.$lines.slice(firstRow, lastRow + 1);
+ };
+ this.getAllLines = function () {
+ return this.getLines(0, this.getLength());
+ };
+ this.getLength = function () {
+ return this.$lines.length;
+ };
+ this.getTextRange = function (range) {
+ if (range.start.row == range.end.row) {
+ return this.getLine(range.start.row)
+ .substring(range.start.column, range.end.column);
+ }
+ var lines = this.getLines(range.start.row, range.end.row);
+ lines[0] = (lines[0] || "").substring(range.start.column);
+ var l = lines.length - 1;
+ if (range.end.row - range.start.row == l)
+ lines[l] = lines[l].substring(0, range.end.column);
+ return lines.join(this.getNewLineCharacter());
+ };
+
+ this.$clipPosition = function (position) {
+ var length = this.getLength();
+ if (position.row >= length) {
+ position.row = Math.max(0, length - 1);
+ position.column = this.getLine(length - 1).length;
+ } else if (position.row < 0)
+ position.row = 0;
+ return position;
+ };
+ this.insert = function (position, text) {
+ if (!text || text.length === 0)
+ return position;
+
+ position = this.$clipPosition(position);
+ if (this.getLength() <= 1)
+ this.$detectNewLine(text);
+
+ var lines = this.$split(text);
+ var firstLine = lines.splice(0, 1)[0];
+ var lastLine = lines.length == 0 ? null : lines.splice(lines.length - 1, 1)[0];
+
+ position = this.insertInLine(position, firstLine);
+ if (lastLine !== null) {
+ position = this.insertNewLine(position); // terminate first line
+ position = this._insertLines(position.row, lines);
+ position = this.insertInLine(position, lastLine || "");
+ }
+ return position;
+ };
+ this.insertLines = function (row, lines) {
+ if (row >= this.getLength())
+ return this.insert({row: row, column: 0}, "\n" + lines.join("\n"));
+ return this._insertLines(Math.max(row, 0), lines);
+ };
+ this._insertLines = function (row, lines) {
+ if (lines.length == 0)
+ return {row: row, column: 0};
+ var end;
+ if (lines.length > 0xFFFF) {
+ end = this._insertLines(row, lines.slice(0xFFFF));
+ lines = lines.slice(0, 0xFFFF);
+ }
+
+ var args = [row, 0];
+ args.push.apply(args, lines);
+ this.$lines.splice.apply(this.$lines, args);
+
+ var range = new Range(row, 0, row + lines.length, 0);
+ var delta = {
+ action: "insertLines",
+ range: range,
+ lines: lines
+ };
+ this._emit("change", { data: delta });
+ return end || range.end;
+ };
+ this.insertNewLine = function (position) {
+ position = this.$clipPosition(position);
+ var line = this.$lines[position.row] || "";
+
+ this.$lines[position.row] = line.substring(0, position.column);
+ this.$lines.splice(position.row + 1, 0, line.substring(position.column, line.length));
+
+ var end = {
+ row: position.row + 1,
+ column: 0
+ };
+
+ var delta = {
+ action: "insertText",
+ range: Range.fromPoints(position, end),
+ text: this.getNewLineCharacter()
+ };
+ this._emit("change", { data: delta });
+
+ return end;
+ };
+ this.insertInLine = function (position, text) {
+ if (text.length == 0)
+ return position;
+
+ var line = this.$lines[position.row] || "";
+
+ this.$lines[position.row] = line.substring(0, position.column) + text
+ + line.substring(position.column);
+
+ var end = {
+ row: position.row,
+ column: position.column + text.length
+ };
+
+ var delta = {
+ action: "insertText",
+ range: Range.fromPoints(position, end),
+ text: text
+ };
+ this._emit("change", { data: delta });
+
+ return end;
+ };
+ this.remove = function (range) {
+ if (!range instanceof Range)
+ range = Range.fromPoints(range.start, range.end);
+ range.start = this.$clipPosition(range.start);
+ range.end = this.$clipPosition(range.end);
+
+ if (range.isEmpty())
+ return range.start;
+
+ var firstRow = range.start.row;
+ var lastRow = range.end.row;
+
+ if (range.isMultiLine()) {
+ var firstFullRow = range.start.column == 0 ? firstRow : firstRow + 1;
+ var lastFullRow = lastRow - 1;
+
+ if (range.end.column > 0)
+ this.removeInLine(lastRow, 0, range.end.column);
+
+ if (lastFullRow >= firstFullRow)
+ this._removeLines(firstFullRow, lastFullRow);
+
+ if (firstFullRow != firstRow) {
+ this.removeInLine(firstRow, range.start.column, this.getLine(firstRow).length);
+ this.removeNewLine(range.start.row);
+ }
+ }
+ else {
+ this.removeInLine(firstRow, range.start.column, range.end.column);
+ }
+ return range.start;
+ };
+ this.removeInLine = function (row, startColumn, endColumn) {
+ if (startColumn == endColumn)
+ return;
+
+ var range = new Range(row, startColumn, row, endColumn);
+ var line = this.getLine(row);
+ var removed = line.substring(startColumn, endColumn);
+ var newLine = line.substring(0, startColumn) + line.substring(endColumn, line.length);
+ this.$lines.splice(row, 1, newLine);
+
+ var delta = {
+ action: "removeText",
+ range: range,
+ text: removed
+ };
+ this._emit("change", { data: delta });
+ return range.start;
+ };
+ this.removeLines = function (firstRow, lastRow) {
+ if (firstRow < 0 || lastRow >= this.getLength())
+ return this.remove(new Range(firstRow, 0, lastRow + 1, 0));
+ return this._removeLines(firstRow, lastRow);
+ };
+
+ this._removeLines = function (firstRow, lastRow) {
+ var range = new Range(firstRow, 0, lastRow + 1, 0);
+ var removed = this.$lines.splice(firstRow, lastRow - firstRow + 1);
+
+ var delta = {
+ action: "removeLines",
+ range: range,
+ nl: this.getNewLineCharacter(),
+ lines: removed
+ };
+ this._emit("change", { data: delta });
+ return removed;
+ };
+ this.removeNewLine = function (row) {
+ var firstLine = this.getLine(row);
+ var secondLine = this.getLine(row + 1);
+
+ var range = new Range(row, firstLine.length, row + 1, 0);
+ var line = firstLine + secondLine;
+
+ this.$lines.splice(row, 2, line);
+
+ var delta = {
+ action: "removeText",
+ range: range,
+ text: this.getNewLineCharacter()
+ };
+ this._emit("change", { data: delta });
+ };
+ this.replace = function (range, text) {
+ if (!range instanceof Range)
+ range = Range.fromPoints(range.start, range.end);
+ if (text.length == 0 && range.isEmpty())
+ return range.start;
+ if (text == this.getTextRange(range))
+ return range.end;
+
+ this.remove(range);
+ var end;
+ if (text) {
+ end = this.insert(range.start, text);
+ }
+ else {
+ end = range.start;
+ }
+
+ return end;
+ };
+ this.applyDeltas = function (deltas) {
+ for (var i = 0; i < deltas.length; i++) {
+ var delta = deltas[i];
+ var range = Range.fromPoints(delta.range.start, delta.range.end);
+
+ if (delta.action == "insertLines")
+ this.insertLines(range.start.row, delta.lines);
+ else if (delta.action == "insertText")
+ this.insert(range.start, delta.text);
+ else if (delta.action == "removeLines")
+ this._removeLines(range.start.row, range.end.row - 1);
+ else if (delta.action == "removeText")
+ this.remove(range);
+ }
+ };
+ this.revertDeltas = function (deltas) {
+ for (var i = deltas.length - 1; i >= 0; i--) {
+ var delta = deltas[i];
+
+ var range = Range.fromPoints(delta.range.start, delta.range.end);
+
+ if (delta.action == "insertLines")
+ this._removeLines(range.start.row, range.end.row - 1);
+ else if (delta.action == "insertText")
+ this.remove(range);
+ else if (delta.action == "removeLines")
+ this._insertLines(range.start.row, delta.lines);
+ else if (delta.action == "removeText")
+ this.insert(range.start, delta.text);
+ }
+ };
+ this.indexToPosition = function (index, startRow) {
+ var lines = this.$lines || this.getAllLines();
+ var newlineLength = this.getNewLineCharacter().length;
+ for (var i = startRow || 0, l = lines.length; i < l; i++) {
+ index -= lines[i].length + newlineLength;
+ if (index < 0)
+ return {row: i, column: index + lines[i].length + newlineLength};
+ }
+ return {row: l - 1, column: lines[l - 1].length};
+ };
+ this.positionToIndex = function (pos, startRow) {
+ var lines = this.$lines || this.getAllLines();
+ var newlineLength = this.getNewLineCharacter().length;
+ var index = 0;
+ var row = Math.min(pos.row, lines.length);
+ for (var i = startRow || 0; i < row; ++i)
+ index += lines[i].length + newlineLength;
+
+ return index + pos.column;
+ };
+
+ }).call(Document.prototype);
+
+ exports.Document = Document;
+});
+
+define('ace/lib/event_emitter', ['require', 'exports', 'module' ], function (require, exports, module) {
+ "use strict";
+
+ var EventEmitter = {};
+ var stopPropagation = function () {
+ this.propagationStopped = true;
+ };
+ var preventDefault = function () {
+ this.defaultPrevented = true;
+ };
+
+ EventEmitter._emit =
+ EventEmitter._dispatchEvent = function (eventName, e) {
+ this._eventRegistry || (this._eventRegistry = {});
+ this._defaultHandlers || (this._defaultHandlers = {});
+
+ var listeners = this._eventRegistry[eventName] || [];
+ var defaultHandler = this._defaultHandlers[eventName];
+ if (!listeners.length && !defaultHandler)
+ return;
+
+ if (typeof e != "object" || !e)
+ e = {};
+
+ if (!e.type)
+ e.type = eventName;
+ if (!e.stopPropagation)
+ e.stopPropagation = stopPropagation;
+ if (!e.preventDefault)
+ e.preventDefault = preventDefault;
+
+ listeners = listeners.slice();
+ for (var i = 0; i < listeners.length; i++) {
+ listeners[i](e, this);
+ if (e.propagationStopped)
+ break;
+ }
+
+ if (defaultHandler && !e.defaultPrevented)
+ return defaultHandler(e, this);
+ };
+
+
+ EventEmitter._signal = function (eventName, e) {
+ var listeners = (this._eventRegistry || {})[eventName];
+ if (!listeners)
+ return;
+ listeners = listeners.slice();
+ for (var i = 0; i < listeners.length; i++)
+ listeners[i](e, this);
+ };
+
+ EventEmitter.once = function (eventName, callback) {
+ var _self = this;
+ callback && this.addEventListener(eventName, function newCallback() {
+ _self.removeEventListener(eventName, newCallback);
+ callback.apply(null, arguments);
+ });
+ };
+
+
+ EventEmitter.setDefaultHandler = function (eventName, callback) {
+ var handlers = this._defaultHandlers
+ if (!handlers)
+ handlers = this._defaultHandlers = {_disabled_: {}};
+
+ if (handlers[eventName]) {
+ var old = handlers[eventName];
+ var disabled = handlers._disabled_[eventName];
+ if (!disabled)
+ handlers._disabled_[eventName] = disabled = [];
+ disabled.push(old);
+ var i = disabled.indexOf(callback);
+ if (i != -1)
+ disabled.splice(i, 1);
+ }
+ handlers[eventName] = callback;
+ };
+ EventEmitter.removeDefaultHandler = function (eventName, callback) {
+ var handlers = this._defaultHandlers
+ if (!handlers)
+ return;
+ var disabled = handlers._disabled_[eventName];
+
+ if (handlers[eventName] == callback) {
+ var old = handlers[eventName];
+ if (disabled)
+ this.setDefaultHandler(eventName, disabled.pop());
+ } else if (disabled) {
+ var i = disabled.indexOf(callback);
+ if (i != -1)
+ disabled.splice(i, 1);
+ }
+ };
+
+ EventEmitter.on =
+ EventEmitter.addEventListener = function (eventName, callback, capturing) {
+ this._eventRegistry = this._eventRegistry || {};
+
+ var listeners = this._eventRegistry[eventName];
+ if (!listeners)
+ listeners = this._eventRegistry[eventName] = [];
+
+ if (listeners.indexOf(callback) == -1)
+ listeners[capturing ? "unshift" : "push"](callback);
+ return callback;
+ };
+
+ EventEmitter.off =
+ EventEmitter.removeListener =
+ EventEmitter.removeEventListener = function (eventName, callback) {
+ this._eventRegistry = this._eventRegistry || {};
+
+ var listeners = this._eventRegistry[eventName];
+ if (!listeners)
+ return;
+
+ var index = listeners.indexOf(callback);
+ if (index !== -1)
+ listeners.splice(index, 1);
+ };
+
+ EventEmitter.removeAllListeners = function (eventName) {
+ if (this._eventRegistry) this._eventRegistry[eventName] = [];
+ };
+
+ exports.EventEmitter = EventEmitter;
+
+});
+
+define('ace/range', ['require', 'exports', 'module' ], function (require, exports, module) {
+ "use strict";
+
+ var comparePoints = function (p1, p2) {
+ return p1.row - p2.row || p1.column - p2.column;
+ };
+ var Range = function (startRow, startColumn, endRow, endColumn) {
+ this.start = {
+ row: startRow,
+ column: startColumn
+ };
+
+ this.end = {
+ row: endRow,
+ column: endColumn
+ };
+ };
+
+ (function () {
+ this.isEqual = function (range) {
+ return this.start.row === range.start.row &&
+ this.end.row === range.end.row &&
+ this.start.column === range.start.column &&
+ this.end.column === range.end.column;
+ };
+ this.toString = function () {
+ return ("Range: [" + this.start.row + "/" + this.start.column +
+ "] -> [" + this.end.row + "/" + this.end.column + "]");
+ };
+
+ this.contains = function (row, column) {
+ return this.compare(row, column) == 0;
+ };
+ this.compareRange = function (range) {
+ var cmp,
+ end = range.end,
+ start = range.start;
+
+ cmp = this.compare(end.row, end.column);
+ if (cmp == 1) {
+ cmp = this.compare(start.row, start.column);
+ if (cmp == 1) {
+ return 2;
+ } else if (cmp == 0) {
+ return 1;
+ } else {
+ return 0;
+ }
+ } else if (cmp == -1) {
+ return -2;
+ } else {
+ cmp = this.compare(start.row, start.column);
+ if (cmp == -1) {
+ return -1;
+ } else if (cmp == 1) {
+ return 42;
+ } else {
+ return 0;
+ }
+ }
+ };
+ this.comparePoint = function (p) {
+ return this.compare(p.row, p.column);
+ };
+ this.containsRange = function (range) {
+ return this.comparePoint(range.start) == 0 && this.comparePoint(range.end) == 0;
+ };
+ this.intersects = function (range) {
+ var cmp = this.compareRange(range);
+ return (cmp == -1 || cmp == 0 || cmp == 1);
+ };
+ this.isEnd = function (row, column) {
+ return this.end.row == row && this.end.column == column;
+ };
+ this.isStart = function (row, column) {
+ return this.start.row == row && this.start.column == column;
+ };
+ this.setStart = function (row, column) {
+ if (typeof row == "object") {
+ this.start.column = row.column;
+ this.start.row = row.row;
+ } else {
+ this.start.row = row;
+ this.start.column = column;
+ }
+ };
+ this.setEnd = function (row, column) {
+ if (typeof row == "object") {
+ this.end.column = row.column;
+ this.end.row = row.row;
+ } else {
+ this.end.row = row;
+ this.end.column = column;
+ }
+ };
+ this.inside = function (row, column) {
+ if (this.compare(row, column) == 0) {
+ if (this.isEnd(row, column) || this.isStart(row, column)) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+ return false;
+ };
+ this.insideStart = function (row, column) {
+ if (this.compare(row, column) == 0) {
+ if (this.isEnd(row, column)) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+ return false;
+ };
+ this.insideEnd = function (row, column) {
+ if (this.compare(row, column) == 0) {
+ if (this.isStart(row, column)) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+ return false;
+ };
+ this.compare = function (row, column) {
+ if (!this.isMultiLine()) {
+ if (row === this.start.row) {
+ return column < this.start.column ? -1 : (column > this.end.column ? 1 : 0);
+ }
+ }
+
+ if (row < this.start.row)
+ return -1;
+
+ if (row > this.end.row)
+ return 1;
+
+ if (this.start.row === row)
+ return column >= this.start.column ? 0 : -1;
+
+ if (this.end.row === row)
+ return column <= this.end.column ? 0 : 1;
+
+ return 0;
+ };
+ this.compareStart = function (row, column) {
+ if (this.start.row == row && this.start.column == column) {
+ return -1;
+ } else {
+ return this.compare(row, column);
+ }
+ };
+ this.compareEnd = function (row, column) {
+ if (this.end.row == row && this.end.column == column) {
+ return 1;
+ } else {
+ return this.compare(row, column);
+ }
+ };
+ this.compareInside = function (row, column) {
+ if (this.end.row == row && this.end.column == column) {
+ return 1;
+ } else if (this.start.row == row && this.start.column == column) {
+ return -1;
+ } else {
+ return this.compare(row, column);
+ }
+ };
+ this.clipRows = function (firstRow, lastRow) {
+ var start, end;
+ if (this.end.row > lastRow)
+ end = {row: lastRow + 1, column: 0};
+ else if (this.end.row < firstRow)
+ end = {row: firstRow, column: 0};
+
+ if (this.start.row > lastRow)
+ start = {row: lastRow + 1, column: 0};
+ else if (this.start.row < firstRow)
+ start = {row: firstRow, column: 0};
+
+ return Range.fromPoints(start || this.start, end || this.end);
+ };
+ this.extend = function (row, column) {
+ var cmp = this.compare(row, column);
+ var start, end;
+ if (cmp == 0)
+ return this;
+ else if (cmp == -1)
+ start = {row: row, column: column};
+ else
+ end = {row: row, column: column};
+
+ return Range.fromPoints(start || this.start, end || this.end);
+ };
+
+ this.isEmpty = function () {
+ return (this.start.row === this.end.row && this.start.column === this.end.column);
+ };
+ this.isMultiLine = function () {
+ return (this.start.row !== this.end.row);
+ };
+ this.clone = function () {
+ return Range.fromPoints(this.start, this.end);
+ };
+ this.collapseRows = function () {
+ if (this.end.column == 0)
+ return new Range(this.start.row, 0, Math.max(this.start.row, this.end.row - 1), 0)
+ else
+ return new Range(this.start.row, 0, this.end.row, 0)
+ };
+ this.toScreenRange = function (session) {
+ var screenPosStart = session.documentToScreenPosition(this.start);
+ var screenPosEnd = session.documentToScreenPosition(this.end);
+
+ return new Range(
+ screenPosStart.row, screenPosStart.column,
+ screenPosEnd.row, screenPosEnd.column
+ );
+ };
+ this.moveBy = function (row, column) {
+ this.start.row += row;
+ this.start.column += column;
+ this.end.row += row;
+ this.end.column += column;
+ };
+
+ }).call(Range.prototype);
+ Range.fromPoints = function (start, end) {
+ return new Range(start.row, start.column, end.row, end.column);
+ };
+ Range.comparePoints = comparePoints;
+
+ Range.comparePoints = function (p1, p2) {
+ return p1.row - p2.row || p1.column - p2.column;
+ };
+
+
+ exports.Range = Range;
+});
+
// ace/anchor: an Anchor is a {row, column} position inside a Document that
// automatically follows the text as edits are applied (used for cursors,
// markers, etc.). It listens to the document's "change" events and shifts
// itself according to each insert/remove delta.
define('ace/anchor', ['require', 'exports', 'module' , 'ace/lib/oop', 'ace/lib/event_emitter'], function (require, exports, module) {
  "use strict";

  var oop = require("./lib/oop");
  var EventEmitter = require("./lib/event_emitter").EventEmitter;

  // Create an anchor attached to `doc`. Either pass (doc, row, column) or
  // (doc, {row, column}) — the second form is detected by `column` being
  // undefined.
  var Anchor = exports.Anchor = function (doc, row, column) {
    this.$onChange = this.onChange.bind(this);
    this.attach(doc);

    if (typeof column == "undefined")
      this.setPosition(row.row, row.column);
    else
      this.setPosition(row, column);
  };

  (function () {

    oop.implement(this, EventEmitter);
    // Current position, clipped to the document's present bounds.
    this.getPosition = function () {
      return this.$clipPositionToDocument(this.row, this.column);
    };
    this.getDocument = function () {
      return this.document;
    };
    // When true, text inserted exactly at the anchor stays to its left
    // (the anchor does not move); default is to shift the anchor right.
    this.$insertRight = false;
    // Document "change" handler: translate this anchor according to the edit
    // delta. The early returns skip edits that occur entirely after the
    // anchor and therefore cannot affect it.
    this.onChange = function (e) {
      var delta = e.data;
      var range = delta.range;

      if (range.start.row == range.end.row && range.start.row != this.row)
        return;

      if (range.start.row > this.row)
        return;

      if (range.start.row == this.row && range.start.column > this.column)
        return;

      var row = this.row;
      var column = this.column;
      var start = range.start;
      var end = range.end;

      if (delta.action === "insertText") {
        if (start.row === row && start.column <= column) {
          if (start.column === column && this.$insertRight) {
            // insertion at the anchor with $insertRight: stay put
          } else if (start.row === end.row) {
            // single-line insertion before the anchor: shift right
            column += end.column - start.column;
          } else {
            // multi-line insertion: anchor moves to the last inserted row
            column -= start.column;
            row += end.row - start.row;
          }
        } else if (start.row !== end.row && start.row < row) {
          row += end.row - start.row;
        }
      } else if (delta.action === "insertLines") {
        if (start.row <= row) {
          row += end.row - start.row;
        }
      } else if (delta.action === "removeText") {
        if (start.row === row && start.column < column) {
          if (end.column >= column)
            // removal spans the anchor: snap to the removal start
            column = start.column;
          else
            column = Math.max(0, column - (end.column - start.column));

        } else if (start.row !== end.row && start.row < row) {
          if (end.row === row)
            column = Math.max(0, column - end.column) + start.column;
          row -= (end.row - start.row);
        } else if (end.row === row) {
          row -= end.row - start.row;
          column = Math.max(0, column - end.column) + start.column;
        }
      } else if (delta.action == "removeLines") {
        if (start.row <= row) {
          if (end.row <= row)
            row -= end.row - start.row;
          else {
            // the anchor's own row was removed: collapse to the start row
            row = start.row;
            column = 0;
          }
        }
      }

      // noClip=true: deltas are trusted to produce in-bounds positions.
      this.setPosition(row, column, true);
    };
    // Move the anchor. Unless noClip is set, the position is clipped to the
    // document. Emits a "change" event with the old and new positions when
    // the position actually changes.
    this.setPosition = function (row, column, noClip) {
      var pos;
      if (noClip) {
        pos = {
          row: row,
          column: column
        };
      } else {
        pos = this.$clipPositionToDocument(row, column);
      }

      if (this.row == pos.row && this.column == pos.column)
        return;

      var old = {
        row: this.row,
        column: this.column
      };

      this.row = pos.row;
      this.column = pos.column;
      this._emit("change", {
        old: old,
        value: pos
      });
    };
    // Stop following document changes.
    this.detach = function () {
      this.document.removeEventListener("change", this.$onChange);
    };
    // Attach to `doc` (or re-attach to the current document when omitted)
    // and start listening for changes.
    this.attach = function (doc) {
      this.document = doc || this.document;
      this.document.on("change", this.$onChange);
    };
    // Clamp (row, column) into the document: rows past the end land on the
    // last row's final column, negative rows/columns clamp to 0, and columns
    // clamp to the target line's length.
    this.$clipPositionToDocument = function (row, column) {
      var pos = {};

      if (row >= this.document.getLength()) {
        pos.row = Math.max(0, this.document.getLength() - 1);
        pos.column = this.document.getLine(pos.row).length;
      }
      else if (row < 0) {
        pos.row = 0;
        pos.column = 0;
      }
      else {
        pos.row = row;
        pos.column = Math.min(this.document.getLine(pos.row).length, Math.max(0, column));
      }

      if (column < 0)
        pos.column = 0;

      return pos;
    };

  }).call(Anchor.prototype);

});
+
+define('ace/lib/lang', ['require', 'exports', 'module' ], function (require, exports, module) {
+ "use strict";
+
+ exports.stringReverse = function (string) {
+ return string.split("").reverse().join("");
+ };
+
+ exports.stringRepeat = function (string, count) {
+ var result = '';
+ while (count > 0) {
+ if (count & 1)
+ result += string;
+
+ if (count >>= 1)
+ string += string;
+ }
+ return result;
+ };
+
  // Precompiled whitespace matchers shared by the trim helpers below.
  var trimBeginRegexp = /^\s\s*/;
  var trimEndRegexp = /\s\s*$/;

  // Remove leading whitespace (like String.prototype.trimStart).
  exports.stringTrimLeft = function (string) {
    return string.replace(trimBeginRegexp, '');
  };

  // Remove trailing whitespace (like String.prototype.trimEnd).
  exports.stringTrimRight = function (string) {
    return string.replace(trimEndRegexp, '');
  };
+
+ exports.copyObject = function (obj) {
+ var copy = {};
+ for (var key in obj) {
+ copy[key] = obj[key];
+ }
+ return copy;
+ };
+
+ exports.copyArray = function (array) {
+ var copy = [];
+ for (var i = 0, l = array.length; i < l; i++) {
+ if (array[i] && typeof array[i] == "object")
+ copy[i] = this.copyObject(array[i]);
+ else
+ copy[i] = array[i];
+ }
+ return copy;
+ };
+
  // Recursively copy a JSON-like structure. Primitives and null are returned
  // as-is, and RegExp instances are intentionally shared rather than cloned.
  exports.deepCopy = function (obj) {
    if (typeof obj !== "object" || !obj)
      return obj;
    var cons = obj.constructor;
    if (cons === RegExp)
      return obj;

    // NOTE(review): the constructor is invoked without `new`, which works for
    // Object and Array but would not round-trip Date/Map/etc. — presumably
    // inputs are plain objects/arrays; confirm at call sites.
    var copy = cons();
    for (var key in obj) {
      if (typeof obj[key] === "object") {
        copy[key] = exports.deepCopy(obj[key]);
      } else {
        copy[key] = obj[key];
      }
    }
    return copy;
  };
+
+ exports.arrayToMap = function (arr) {
+ var map = {};
+ for (var i = 0; i < arr.length; i++) {
+ map[arr[i]] = 1;
+ }
+ return map;
+
+ };
+
+ exports.createMap = function (props) {
+ var map = Object.create(null);
+ for (var i in props) {
+ map[i] = props[i];
+ }
+ return map;
+ };
+ exports.arrayRemove = function (array, value) {
+ for (var i = 0; i <= array.length; i++) {
+ if (value === array[i]) {
+ array.splice(i, 1);
+ }
+ }
+ };
+
  // Backslash-escape every RegExp metacharacter so `str` can be embedded
  // literally inside a dynamically built pattern.
  exports.escapeRegExp = function (str) {
    return str.replace(/([.*+?^${}()|[\]\/\\])/g, '\\$1');
  };
+
+ exports.escapeHTML = function (str) {
+ return str.replace(/&/g, "&").replace(/"/g, """).replace(/'/g, "'").replace(/= '0' && ch <= '9') {
+ string += ch;
+ next();
+ }
+ if (ch === '.') {
+ string += '.';
+ while (next() && ch >= '0' && ch <= '9') {
+ string += ch;
+ }
+ }
+ if (ch === 'e' || ch === 'E') {
+ string += ch;
+ next();
+ if (ch === '-' || ch === '+') {
+ string += ch;
+ next();
+ }
+ while (ch >= '0' && ch <= '9') {
+ string += ch;
+ next();
+ }
+ }
+ number = +string;
+ if (isNaN(number)) {
+ error("Bad number");
+ } else {
+ return number;
+ }
+ },
+
    // Parse a JSON string literal at the current cursor. Supports the
    // standard escape table (`escapee`) and \uXXXX escapes. Returns the
    // decoded string; calls error() (which throws) on malformed input.
    string = function () {

      var hex,
        i,
        string = '',
        uffff;

      if (ch === '"') {
        while (next()) {
          if (ch === '"') {
            next();
            return string;
          } else if (ch === '\\') {
            next();
            if (ch === 'u') {
              // four hex digits -> one UTF-16 code unit
              uffff = 0;
              for (i = 0; i < 4; i += 1) {
                hex = parseInt(next(), 16);
                if (!isFinite(hex)) {
                  break;
                }
                uffff = uffff * 16 + hex;
              }
              string += String.fromCharCode(uffff);
            } else if (typeof escapee[ch] === 'string') {
              string += escapee[ch];
            } else {
              break; // unknown escape: fall through to the error below
            }
          } else {
            string += ch;
          }
        }
      }
      error("Bad string");
    },
+
    // Skip all whitespace and control characters (anything <= ' '),
    // including newlines.
    white = function () {

      while (ch && ch <= ' ') {
        next();
      }
    },

    // Skip only spaces and tabs — newlines are significant between the
    // method/url line and the request body, so they are left for newLine().
    strictWhite = function () {

      while (ch && ( ch == ' ' || ch == '\t')) {
        next();
      }
    },

    // Consume exactly one newline, if the cursor is on one.
    newLine = function () {
      if (ch == '\n') next();
    },
+
    // Parse one of the JSON keywords true/false/null, letter by letter
    // (next(c) verifies the expected character). Throws via error() on
    // anything else.
    word = function () {

      switch (ch) {
        case 't':
          next('t');
          next('r');
          next('u');
          next('e');
          return true;
        case 'f':
          next('f');
          next('a');
          next('l');
          next('s');
          next('e');
          return false;
        case 'n':
          next('n');
          next('u');
          next('l');
          next('l');
          return null;
      }
      error("Unexpected '" + ch + "'");
    },
+
    // Parse and return an HTTP method keyword (GET/HEAD/DELETE/PUT/POST),
    // dispatching on the first character ('P' disambiguates PUT vs POST on
    // the second). Throws via error() for anything else.
    method = function () {
      switch (ch) {
        case 'G':
          next('G');
          next('E');
          next('T');
          return "GET";
        case 'H':
          next('H');
          next('E');
          next('A');
          next('D');
          return "HEAD";
        case 'D':
          next('D');
          next('E');
          next('L');
          next('E');
          next('T');
          next('E');
          return "DELETE";
        case 'P':
          next('P');
          switch (ch) {
            case 'U':
              next('U');
              next('T');
              return "PUT";
            case 'O':
              next('O');
              next('S');
              next('T');
              return "POST";
            default:
              error("Unexpected '" + ch + "'");
          }
          break;
        default:
          error("Expected one of GET/POST/PUT/DELETE/HEAD");
      }

    },
+
    value, // Place holder for the value function.

    // Parse a JSON array: '[' value (',' value)* ']'. Mutually recursive
    // with value(). Throws via error() when the array is unterminated.
    array = function () {

      var array = [];

      if (ch === '[') {
        next('[');
        white();
        if (ch === ']') {
          next(']');
          return array; // empty array
        }
        while (ch) {
          array.push(value());
          white();
          if (ch === ']') {
            next(']');
            return array;
          }
          next(',');
          white();
        }
      }
      error("Bad array");
    },
+
    // Parse a JSON object: '{' string ':' value (',' string ':' value)* '}'.
    // Duplicate keys are reported as errors (checked with hasOwnProperty to
    // dodge inherited prototype keys). Throws via error() when unterminated.
    object = function () {

      var key,
        object = {};

      if (ch === '{') {
        next('{');
        white();
        if (ch === '}') {
          next('}');
          return object; // empty object
        }
        while (ch) {
          key = string();
          white();
          next(':');
          if (Object.hasOwnProperty.call(object, key)) {
            error('Duplicate key "' + key + '"');
          }
          object[key] = value();
          white();
          if (ch === '}') {
            next('}');
            return object;
          }
          next(',');
          white();
        }
      }
      error("Bad object");
    };
+
  // Parse any JSON value, dispatching on the first non-whitespace character:
  // object, array, string, number (leading '-' or digit) or keyword.
  value = function () {

    white();
    switch (ch) {
      case '{':
        return object();
      case '[':
        return array();
      case '"':
        return string();
      case '-':
        return number();
      default:
        return ch >= '0' && ch <= '9' ? number() : word();
    }
  };
+
  // Parse the URL portion of a request line: everything up to the next
  // newline. An empty URL is an error.
  var url = function () {

    var url = '';
    while (ch && ch != '\n') {
      url += ch;
      next();
    }
    if (url == '')
      error('Missing url');
    return url;
  },

  // Parse one Console request: METHOD URL [body-object [body-object ...]].
  // Multiple consecutive JSON objects form a multi-doc (bulk) request body.
  // Parsed values are discarded — this parser only validates and annotates.
  request = function () {
    white();
    var meth = method(); // NOTE(review): the parsed method is unused here
    strictWhite();
    url();
    strictWhite(); // advance to one new line
    newLine();
    strictWhite();
    if (ch == '{') {
      object();
    }
    // multi doc request
    strictWhite(); // advance to one new line
    newLine();
    strictWhite();
    while (ch == '{') {
      // another object
      object();
      strictWhite();
      newLine();
      strictWhite();
    }

  },
+
  // Skip consecutive '#' comment lines (each runs to end of line).
  comment = function () {
    while (ch == '#') {
      while (ch && ch !== '\n') {
        next();
      }
      white();
    }
  },

  // Parse the whole buffer as a sequence of comments and requests. On a parse
  // error, record an annotation and resynchronize at the next line that looks
  // like a request start, so one bad request doesn't hide later ones.
  multi_request = function () {
    while (ch && ch != '') {
      white();
      if (!ch) {
        continue;
      }
      try {
        comment();
        white();
        if (!ch) {
          continue;
        }
        request();
        white();
      }
      catch (e) {
        annotate("error", e.message);
        // snap
        var substring = text.substr(at);
        // NOTE(review): '^' binds only to the POST alternative here — the
        // other verbs can match mid-line; confirm /^(POST|HEAD|...)/m intent.
        var nextMatch = substring.search(/^POST|HEAD|GET|PUT|DELETE/m);
        if (nextMatch < 1) return;
        reset(at + nextMatch);
      }
    }
  };
+
+
  // Module entry point: validate `source` as a sequence of Console requests
  // and return { annotations: [...] } describing any errors found. An
  // optional JSON-style `reviver` is applied to the result tree.
  return function (source, reviver) {
    var result;

    // reset parser state for this run
    text = source;
    at = 0;
    annos = [];
    next();
    multi_request();
    white();
    if (ch) {
      annotate("error", "Syntax error");
    }

    result = { "annotations": annos };


    // Standard JSON.parse-style reviver walk (bottom-up), applied only when
    // a reviver function is supplied.
    return typeof reviver === 'function' ? (function walk(holder, key) {
      var k, v, value = holder[key];
      if (value && typeof value === 'object') {
        for (k in value) {
          if (Object.hasOwnProperty.call(value, k)) {
            v = walk(value, k);
            if (v !== undefined) {
              value[k] = v;
            } else {
              delete value[k];
            }
          }
        }
      }
      return reviver.call(holder, key, value);
    })({'': result}, '') : result;
  };
+});
+
+
// Web-worker side of the Sense editor mode: mirrors the document (via ace's
// Mirror), re-parses it after edits settle, and sends error/annotation
// positions back to the UI thread through `sender`.
define("sense_editor/mode/worker", ['require', 'exports', 'module' , 'ace/lib/oop', 'ace/worker/mirror',
  'sense_editor/mode/worker_parser'], function (require, exports, module) {
  "use strict";


  var oop = require("ace/lib/oop");
  var Mirror = require("ace/worker/mirror").Mirror;
  var parse = require("sense_editor/mode/worker_parser");

  var SenseWorker = exports.SenseWorker = function (sender) {
    Mirror.call(this, sender);
    // debounce re-parsing: wait 200ms after the last edit
    this.setTimeout(200);
  };

  oop.inherits(SenseWorker, Mirror);

  (function () {

    // Called by Mirror when the mirrored document has changed (after the
    // timeout). Emits "error" with a single position for hard parse
    // failures, otherwise "ok" with all annotations mapped to row/column.
    this.onUpdate = function () {
      var value = this.doc.getValue();
      var pos, result;
      try {
        result = parse(value);
      } catch (e) {
        // e.at is a 1-based character offset from the parser
        pos = this.charToDocumentPosition(e.at - 1);
        this.sender.emit("error", {
          row: pos.row,
          column: pos.column,
          text: e.message,
          type: "error"
        });
        return;
      }
      for (var i = 0; i < result.annotations.length; i++) {
        pos = this.charToDocumentPosition(result.annotations[i].at - 1);
        result.annotations[i].row = pos.row;
        result.annotations[i].column = pos.column;

      }
      this.sender.emit("ok", result.annotations);
    };

    // Convert a 0-based character offset into a {row, column} document
    // position, accounting for the document's newline width. Offsets past
    // the end clamp to the end of the last line.
    this.charToDocumentPosition = function (charPos) {
      var i = 0;
      var len = this.doc.getLength();
      var nl = this.doc.getNewLineCharacter().length;

      if (!len) {
        return { row: 0, column: 0};
      }

      var lineStart = 0, line;
      while (i < len) {
        line = this.doc.getLine(i);
        var lineLength = line.length + nl;
        if (lineStart + lineLength > charPos)
          return {
            row: i,
            column: charPos - lineStart
          };

        lineStart += lineLength;
        i += 1;
      }

      return {
        row: i - 1,
        column: line.length
      };
    };

  }).call(SenseWorker.prototype);

});
+
diff --git a/src/plugins/console/public/src/sense_editor/row_parser.js b/src/plugins/console/public/src/sense_editor/row_parser.js
new file mode 100644
index 0000000000000..f7cbbe44d624f
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/row_parser.js
@@ -0,0 +1,123 @@
// Bit flags describing what role a row plays in a Console request. Values
// are combinable with | and tested with & (see rowPredicate below).
let MODE = {
  REQUEST_START: 2,
  IN_REQUEST: 4,
  MULTI_DOC_CUR_DOC_END: 8,
  REQUEST_END: 16,
  BETWEEN_REQUESTS: 32
};

/**
 * Classifies editor rows relative to Console requests (start/end/inside/
 * between) using the ace session's tokenizer state plus light line scanning.
 *
 * @param {object} editor - default ace editor; each is*Row helper accepts an
 *   optional per-call editor override as its second argument.
 */
function RowParser(editor) {
  var defaultEditor = editor;

  // Returns a MODE bitmask for `row`. When row is null/undefined the cursor
  // row of the editor is used.
  //
  // FIX: rowPredicate (and therefore every is*Row(row, _e) helper) passes an
  // editor as the second argument, but it used to be silently ignored — the
  // closure `editor` was always consulted. The optional `_editor` parameter
  // now takes effect; calls without it behave exactly as before.
  this.getRowParseMode = function (row, _editor) {
    var targetEditor = _editor || defaultEditor;
    if (row == null || typeof row == "undefined") {
      row = targetEditor.getCursorPosition().row;
    }

    var session = targetEditor.getSession();
    if (row >= session.getLength() || row < 0) {
      return MODE.BETWEEN_REQUESTS;
    }
    var mode = session.getState(row);
    if (!mode) {
      return MODE.BETWEEN_REQUESTS;
    } // shouldn't really happen

    if (mode !== "start") {
      return MODE.IN_REQUEST;
    }
    var line = (session.getLine(row) || "").trim();
    if (!line || line[0] === '#') {
      return MODE.BETWEEN_REQUESTS;
    } // empty line or a comment waiting for a new req to start

    if (line.indexOf("}", line.length - 1) >= 0) {
      // check for a multi doc request (must start a new json doc immediately after this one end.
      row++;
      if (row < session.getLength()) {
        line = (session.getLine(row) || "").trim();
        if (line.indexOf("{") === 0) { // next line is another doc in a multi doc
          return MODE.MULTI_DOC_CUR_DOC_END | MODE.IN_REQUEST;
        }

      }
      return MODE.REQUEST_END | MODE.MULTI_DOC_CUR_DOC_END; // end of request
    }

    // check for single line requests
    row++;
    if (row >= session.getLength()) {
      return MODE.REQUEST_START | MODE.REQUEST_END;
    }
    line = (session.getLine(row) || "").trim();
    if (line.indexOf("{") !== 0) { // next line is another request
      return MODE.REQUEST_START | MODE.REQUEST_END;
    }

    return MODE.REQUEST_START;
  };

  // True when the row's mode shares at least one bit with `value`.
  this.rowPredicate = function (row, editor, value) {
    var mode = this.getRowParseMode(row, editor);
    return (mode & value) > 0;
  };

  this.isEndRequestRow = function (row, _e) {
    var editor = _e || defaultEditor;
    return this.rowPredicate(row, editor, MODE.REQUEST_END);
  };

  this.isRequestEdge = function (row, _e) {
    var editor = _e || defaultEditor;
    return this.rowPredicate(row, editor, MODE.REQUEST_END | MODE.REQUEST_START);
  };

  this.isStartRequestRow = function (row, _e) {
    var editor = _e || defaultEditor;
    return this.rowPredicate(row, editor, MODE.REQUEST_START);
  };

  this.isInBetweenRequestsRow = function (row, _e) {
    var editor = _e || defaultEditor;
    return this.rowPredicate(row, editor, MODE.BETWEEN_REQUESTS);
  };

  this.isInRequestsRow = function (row, _e) {
    var editor = _e || defaultEditor;
    return this.rowPredicate(row, editor, MODE.IN_REQUEST);
  };

  this.isMultiDocDocEndRow = function (row, _e) {
    var editor = _e || defaultEditor;
    return this.rowPredicate(row, editor, MODE.MULTI_DOC_CUR_DOC_END);
  };

  // Accepts either a token or a token iterator; empty = missing or whitespace.
  this.isEmptyToken = function (tokenOrTokenIter) {
    var token = tokenOrTokenIter && tokenOrTokenIter.getCurrentToken ? tokenOrTokenIter.getCurrentToken() : tokenOrTokenIter;
    return !token || token.type == "whitespace";
  };

  // True for method tokens and any token type prefixed with "url".
  this.isUrlOrMethodToken = function (tokenOrTokenIter) {
    var t = tokenOrTokenIter.getCurrentToken ? tokenOrTokenIter.getCurrentToken() : tokenOrTokenIter;
    return t && t.type && (t.type == "method" || t.type.indexOf("url") === 0);
  };

  // Advance the iterator past whitespace; returns the next meaningful token
  // (or null at end of document).
  this.nextNonEmptyToken = function (tokenIter) {
    var t = tokenIter.stepForward();
    while (t && this.isEmptyToken(t)) t = tokenIter.stepForward();
    return t;
  };

  // Step backwards past whitespace; empty rows yield null tokens, so keep
  // stepping while we're not yet at row 0.
  this.prevNonEmptyToken = function (tokenIter) {
    var t = tokenIter.stepBackward();
    // empty rows return null token.
    while ((t || tokenIter.getCurrentTokenRow() > 0) && this.isEmptyToken(t)) t = tokenIter.stepBackward();
    return t;
  };
}
+
// Expose the mode bit-flags on the prototype so callers can interpret the
// masks returned by getRowParseMode without a separate import.
RowParser.prototype.MODE = MODE;

module.exports = RowParser;
diff --git a/src/plugins/console/public/src/sense_editor/theme-sense-dark.js b/src/plugins/console/public/src/sense_editor/theme-sense-dark.js
new file mode 100644
index 0000000000000..bf25de1cceee9
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor/theme-sense-dark.js
@@ -0,0 +1,115 @@
+let ace = require('ace');
+
// Dark ace theme for the Sense/Console editor. The theme contract is three
// exports (isDark, cssClass, cssText); the CSS is injected once at module
// load via ace's dom helper. The backslash line-continuations keep cssText
// a single string literal.
ace.define("ace/theme/sense-dark", ['require', 'exports', 'module'],
  function (require, exports, module) {
    exports.isDark = true;
    exports.cssClass = "ace-sense-dark";
    exports.cssText = ".ace-sense-dark .ace_gutter {\
background: #2e3236;\
color: #bbbfc2;\
}\
.ace-sense-dark .ace_print-margin {\
width: 1px;\
background: #555651\
}\
.ace-sense-dark .ace_scroller {\
background-color: #202328;\
}\
.ace-sense-dark .ace_content {\
}\
.ace-sense-dark .ace_text-layer {\
color: #F8F8F2\
}\
.ace-sense-dark .ace_cursor {\
border-left: 2px solid #F8F8F0\
}\
.ace-sense-dark .ace_overwrite-cursors .ace_cursor {\
border-left: 0px;\
border-bottom: 1px solid #F8F8F0\
}\
.ace-sense-dark .ace_marker-layer .ace_selection {\
background: #222\
}\
.ace-sense-dark.ace_multiselect .ace_selection.ace_start {\
box-shadow: 0 0 3px 0px #272822;\
border-radius: 2px\
}\
.ace-sense-dark .ace_marker-layer .ace_step {\
background: rgb(102, 82, 0)\
}\
.ace-sense-dark .ace_marker-layer .ace_bracket {\
margin: -1px 0 0 -1px;\
border: 1px solid #49483E\
}\
.ace-sense-dark .ace_marker-layer .ace_active-line {\
background: #202020\
}\
.ace-sense-dark .ace_gutter-active-line {\
background-color: #272727\
}\
.ace-sense-dark .ace_marker-layer .ace_selected-word {\
border: 1px solid #49483E\
}\
.ace-sense-dark .ace_invisible {\
color: #49483E\
}\
.ace-sense-dark .ace_entity.ace_name.ace_tag,\
.ace-sense-dark .ace_keyword,\
.ace-sense-dark .ace_meta,\
.ace-sense-dark .ace_storage {\
color: #F92672\
}\
.ace-sense-dark .ace_constant.ace_character,\
.ace-sense-dark .ace_constant.ace_language,\
.ace-sense-dark .ace_constant.ace_numeric,\
.ace-sense-dark .ace_constant.ace_other {\
color: #AE81FF\
}\
.ace-sense-dark .ace_invalid {\
color: #F8F8F0;\
background-color: #F92672\
}\
.ace-sense-dark .ace_invalid.ace_deprecated {\
color: #F8F8F0;\
background-color: #AE81FF\
}\
.ace-sense-dark .ace_support.ace_constant,\
.ace-sense-dark .ace_support.ace_function {\
color: #66D9EF\
}\
.ace-sense-dark .ace_fold {\
background-color: #A6E22E;\
border-color: #F8F8F2\
}\
.ace-sense-dark .ace_storage.ace_type,\
.ace-sense-dark .ace_support.ace_class,\
.ace-sense-dark .ace_support.ace_type {\
font-style: italic;\
color: #66D9EF\
}\
.ace-sense-dark .ace_entity.ace_name.ace_function,\
.ace-sense-dark .ace_entity.ace_other.ace_attribute-name,\
.ace-sense-dark .ace_variable {\
color: #A6E22E\
}\
.ace-sense-dark .ace_variable.ace_parameter {\
font-style: italic;\
color: #FD971F\
}\
.ace-sense-dark .ace_string {\
color: #E6DB74\
}\
.ace-sense-dark .ace_comment {\
color: #629755\
}\
.ace-sense-dark .ace_markup.ace_underline {\
text-decoration: underline\
}\
.ace-sense-dark .ace_indent-guide {\
background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAEklEQVQImWNQ11D6z7Bq1ar/ABCKBG6g04U2AAAAAElFTkSuQmCC) right repeat-y\
}";

    // Inject the stylesheet under the theme's class name.
    var dom = require("ace/lib/dom");
    dom.importCssString(exports.cssText, exports.cssClass);
  });
+
diff --git a/src/plugins/console/public/src/sense_editor_resize.js b/src/plugins/console/public/src/sense_editor_resize.js
new file mode 100644
index 0000000000000..ef989c1b63493
--- /dev/null
+++ b/src/plugins/console/public/src/sense_editor_resize.js
@@ -0,0 +1,11 @@
+import ResizeCheckerProvider from 'ui/vislib/lib/resize_checker'
+
// Angular provider: builds a helper that watches an element for size changes
// and resizes the given ace editors to match, tearing the watcher down when
// the scope is destroyed.
export function useResizeCheckerProvider(Private) {
  const ResizeChecker = Private(ResizeCheckerProvider);

  return function useResizeChecker($scope, $el, ...editors) {
    const checker = new ResizeChecker($el);
    const resizeAll = () => {
      for (const editor of editors) {
        editor.resize();
      }
    };
    checker.on('resize', resizeAll);
    $scope.$on('$destroy', () => checker.destroy());
  };
}
diff --git a/src/plugins/console/public/src/settings.js b/src/plugins/console/public/src/settings.js
new file mode 100644
index 0000000000000..49c11b37c79fd
--- /dev/null
+++ b/src/plugins/console/public/src/settings.js
@@ -0,0 +1,73 @@
+let $ = require('jquery');
+let es = require('./es');
+const storage = require('./storage');
+
// Editor font size in px; falls back to 14 when nothing is stored.
function getFontSize() {
  return storage.get('font_size', 14);
}

// Persist the font size and immediately re-apply settings to both editors.
function setFontSize(size) {
  storage.set('font_size', size);
  applyCurrentSettings();
  return true;
}

// Whether long lines soft-wrap; defaults to on.
function getWrapMode() {
  return storage.get('wrap_mode', true);
}

function setWrapMode(mode) {
  storage.set('wrap_mode', mode);
  applyCurrentSettings();
  return true;
}

// NOTE(review): not exported below and not read anywhere visible here —
// presumably consumed elsewhere via storage['basic_auth']; confirm.
function setBasicAuth(mode) {
  storage.set('basic_auth', mode);
  applyCurrentSettings();
  return true;
}

// Autocomplete toggles; both field and index suggestions default to on.
function getAutocomplete() {
  return storage.get('autocomplete_settings', { fields: true, indices: true });
}

function setAutocomplete(settings) {
  storage.set('autocomplete_settings', settings);
  return true;
}

// Apply the stored settings to one editor, or — when called with no
// argument — to both the input and output editors (required lazily to avoid
// a circular dependency at module load).
function applyCurrentSettings(editor) {
  if (typeof editor === 'undefined') {
    applyCurrentSettings(require('./input'));
    applyCurrentSettings(require('./output'));
  }
  if (editor) {
    // assumes the editor exposes its jQuery element as `$el` — TODO confirm
    editor.getSession().setUseWrapMode(getWrapMode());
    editor.$el.css('font-size', getFontSize() + 'px');
  }
}

// Snapshot of the current settings, shaped for the settings UI.
function getCurrentSettings() {
  return {
    autocomplete: getAutocomplete(),
    wrapMode: getWrapMode(),
    fontSize: parseFloat(getFontSize()),
  };
}

// Persist a full settings object, refocus the input editor, and return the
// freshly read settings.
function updateSettings({ fontSize, wrapMode, autocomplete}) {
  setFontSize(fontSize);
  setWrapMode(wrapMode);
  setAutocomplete(autocomplete);
  require('./input').focus();
  return getCurrentSettings();
}

module.exports = {
  getAutocomplete,
  applyCurrentSettings,

  getCurrentSettings,
  updateSettings,
};
diff --git a/src/plugins/console/public/src/smart_resize.js b/src/plugins/console/public/src/smart_resize.js
new file mode 100644
index 0000000000000..5f943d1579e14
--- /dev/null
+++ b/src/plugins/console/public/src/smart_resize.js
@@ -0,0 +1,6 @@
+import { throttle } from 'lodash';
+
+module.exports = function (editor) {
+ const resize = editor.resize;
+ return throttle(() => resize.call(editor), 35)
+};
diff --git a/src/plugins/console/public/src/storage.js b/src/plugins/console/public/src/storage.js
new file mode 100644
index 0000000000000..c23a11f1d9f90
--- /dev/null
+++ b/src/plugins/console/public/src/storage.js
@@ -0,0 +1,58 @@
+const { transform, keys, startsWith } = require('lodash');
+
/**
 * Thin JSON wrapper around a Web-Storage-like engine (getItem/setItem/
 * removeItem). Every key is namespaced with `prefix` so several apps can
 * share one backing store without collisions.
 */
class Storage {
  constructor(engine, prefix) {
    this.engine = engine;
    this.prefix = prefix;
  }

  // Serialize a value for storage.
  encode(val) {
    return JSON.stringify(val);
  }

  // Parse a stored value; non-strings (e.g. null for a missing key)
  // deliberately decode to undefined instead of throwing.
  decode(val) {
    if (typeof val !== 'string') {
      return undefined;
    }
    return JSON.parse(val);
  }

  // Prepend the namespace prefix.
  encodeKey(key) {
    return `${this.prefix}${key}`;
  }

  // Strip the namespace; keys belonging to other apps map to undefined.
  decodeKey(key) {
    if (startsWith(key, this.prefix)) {
      return `${key.slice(this.prefix.length)}`;
    }
  }

  // Store `val` under `key`; returns `val` for chaining.
  set(key, val) {
    this.engine.setItem(this.encodeKey(key), this.encode(val));
    return val;
  }

  // True when a (possibly null-valued JSON) entry exists for `key`.
  has(key) {
    const stored = this.engine.getItem(this.encodeKey(key));
    return stored != null;
  }

  // Fetch and decode `key`, or return `_default` when absent.
  get(key, _default) {
    if (!this.has(key)) {
      return _default;
    }
    return this.decode(this.engine.getItem(this.encodeKey(key)));
  }

  delete(key) {
    return this.engine.removeItem(this.encodeKey(key));
  }

  // All keys in our namespace, with the prefix stripped.
  keys() {
    return transform(keys(this.engine), (ours, key) => {
      const ourKey = this.decodeKey(key);
      if (ourKey != null) ours.push(ourKey);
    });
  }
}
+
+module.exports = new Storage(localStorage, 'sense:');
diff --git a/src/plugins/console/public/src/utils.js b/src/plugins/console/public/src/utils.js
new file mode 100644
index 0000000000000..0c6425d958056
--- /dev/null
+++ b/src/plugins/console/public/src/utils.js
@@ -0,0 +1,45 @@
var utils = {};

/**
 * Render a request object as Console text: "METHOD url\nbody".
 * `request.data` may be a single body string or an array of body documents
 * (joined with newlines, as in bulk requests).
 */
utils.textFromRequest = function (request) {
  var data = request.data;
  if (typeof data != "string") {
    data = data.join("\n");
  }
  return request.method + " " + request.url + "\n" + data;
};

/**
 * Read a query-string parameter from the current window.location.
 * Returns "" when the parameter is absent; "+" decodes to a space.
 */
utils.getUrlParam = function (name) {
  name = name.replace(/[\[]/, "\\\[").replace(/[\]]/, "\\\]");
  // FIX: the capture must stop at the next parameter ("&") or the fragment
  // ("#"); the previous pattern "([^]*)" matched to the end of the query
  // string, so e.g. ?a=1&b=2 yielded "1&b=2" for "a".
  var regex = new RegExp("[\\?&]" + name + "=([^&#]*)");
  var results = regex.exec(location.search);
  return results == null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
};

/**
 * Stringify `data` as JSON, pretty-printed with 2 spaces when `indent` is
 * truthy, compact otherwise.
 */
utils.jsonToString = function (data, indent) {
  return JSON.stringify(data, null, indent ? 2 : 0);
};

/**
 * Reformat an array of JSON body documents. Documents that fail to parse
 * (e.g. half-typed bodies) are kept verbatim. Returns { changed, data }
 * where `changed` is true when any document was rewritten.
 */
utils.reformatData = function (data, indent) {
  var changed = false;
  var formatted_data = [];
  for (var i = 0; i < data.length; i++) {
    var cur_doc = data[i];
    try {
      // jsonToString only tests `indent` for truthiness, so passing the
      // 2/0 value through is equivalent to passing `indent` itself.
      var new_doc = utils.jsonToString(JSON.parse(cur_doc), indent ? 2 : 0);
      changed = changed || new_doc != cur_doc;
      formatted_data.push(new_doc);
    }
    catch (e) {
      // not valid JSON: keep the document untouched
      console.log(e);
      formatted_data.push(cur_doc);
    }
  }

  return {
    changed: changed,
    data: formatted_data
  };
};
+
+
+module.exports = utils;
diff --git a/src/plugins/console/public/tests/index.html b/src/plugins/console/public/tests/index.html
new file mode 100644
index 0000000000000..b7f7240eb227e
--- /dev/null
+++ b/src/plugins/console/public/tests/index.html
@@ -0,0 +1,36 @@
+
+
+
- ').appendTo($container);
-
- var $window = $(window);
- var $body = $(document.body);
- var binder = new Binder($scope);
-
- // appState from controller
- var $state = $scope.state;
-
- var gridster; // defined in init()
-
- // number of columns to render
- var COLS = 12;
- // number of pixed between each column/row
- var SPACER = 10;
- // pixels used by all of the spacers (gridster puts have a spacer on the ends)
- var spacerSize = SPACER * COLS;
-
- // debounced layout function is safe to call as much as possible
- var safeLayout = _.debounce(layout, 200);
-
- function init() {
- $el.addClass('gridster');
-
- gridster = $el.gridster({
- max_cols: COLS,
- min_cols: COLS,
- autogenerate_stylesheet: false,
- resize: {
- enabled: true,
- stop: readGridsterChangeHandler
- },
- draggable: {
- handle: '.panel-heading, .panel-title',
- stop: readGridsterChangeHandler
- }
- }).data('gridster');
-
- // This is necessary to enable text selection within gridster elements
- // http://stackoverflow.com/questions/21561027/text-not-selectable-from-editable-div-which-is-draggable
- binder.jqOn($el, 'mousedown', function () {
- gridster.disable().disable_resize();
- });
- binder.jqOn($el, 'mouseup', function enableResize() {
- gridster.enable().enable_resize();
+import _ from 'lodash';
+import $ from 'jquery';
+import Binder from 'ui/binder';
+import 'gridster';
+import uiModules from 'ui/modules';
+
+const app = uiModules.get('app/dashboard');
+
+app.directive('dashboardGrid', function ($compile, Notifier) {
+ return {
+ restrict: 'E',
+ require: '^dashboardApp', // must inherit from the dashboardApp
+ link: function ($scope, $el) {
+ const notify = new Notifier();
+ const $container = $el;
+ $el = $('
').appendTo($container);
+
+ const $window = $(window);
+ const $body = $(document.body);
+ const binder = new Binder($scope);
+
+ // appState from controller
+ const $state = $scope.state;
+
+ let gridster; // defined in init()
+
+ // number of columns to render
+ const COLS = 12;
+ // number of pixed between each column/row
+ const SPACER = 0;
+ // pixels used by all of the spacers (gridster puts have a spacer on the ends)
+ const spacerSize = SPACER * COLS;
+
+ // debounced layout function is safe to call as much as possible
+ const safeLayout = _.debounce(layout, 200);
+
+ function init() {
+ $el.addClass('gridster');
+
+ gridster = $el.gridster({
+ max_cols: COLS,
+ min_cols: COLS,
+ autogenerate_stylesheet: false,
+ resize: {
+ enabled: true,
+ stop: readGridsterChangeHandler
+ },
+ draggable: {
+ handle: '.panel-move, .fa-arrows',
+ stop: readGridsterChangeHandler
+ }
+ }).data('gridster');
+
+ // This is necessary to enable text selection within gridster elements
+ // http://stackoverflow.com/questions/21561027/text-not-selectable-from-editable-div-which-is-draggable
+ binder.jqOn($el, 'mousedown', function () {
+ gridster.disable().disable_resize();
+ });
+ binder.jqOn($el, 'mouseup', function enableResize() {
+ gridster.enable().enable_resize();
+ });
+
+ $scope.$watchCollection('state.panels', function (panels) {
+ const currentPanels = gridster.$widgets.toArray().map(function (el) {
+ return getPanelFor(el);
});
- $scope.$watchCollection('state.panels', function (panels) {
- var currentPanels = gridster.$widgets.toArray().map(function (el) {
- return getPanelFor(el);
- });
+ // panels that are now missing from the panels array
+ const removed = _.difference(currentPanels, panels);
- // panels that are now missing from the panels array
- var removed = _.difference(currentPanels, panels);
+ // panels that have been added
+ const added = _.difference(panels, currentPanels);
- // panels that have been added
- var added = _.difference(panels, currentPanels);
+ if (removed.length) removed.forEach(removePanel);
+ if (added.length) added.forEach(addPanel);
- if (removed.length) removed.forEach(removePanel);
- if (added.length) added.forEach(addPanel);
+ // ensure that every panel can be serialized now that we are done
+ $state.panels.forEach(makePanelSerializeable);
- // ensure that every panel can be serialized now that we are done
- $state.panels.forEach(makePanelSerializeable);
+ // alert interested parties that we have finished processing changes to the panels
+ // TODO: change this from event based to calling a method on dashboardApp
+ if (added.length || removed.length) $scope.$root.$broadcast('change:vis');
+ });
- // alert interested parties that we have finished processing changes to the panels
- // TODO: change this from event based to calling a method on dashboardApp
- if (added.length || removed.length) $scope.$root.$broadcast('change:vis');
- });
+ $scope.$on('$destroy', function () {
+ safeLayout.cancel();
+ $window.off('resize', safeLayout);
- $scope.$on('$destroy', function () {
- $window.off('resize', safeLayout);
-
- if (!gridster) return;
- gridster.$widgets.each(function (i, el) {
- var panel = getPanelFor(el);
- removePanel(panel);
- // stop any animations
- panel.$el.stop();
- // not that we will, but lets be safe
- makePanelSerializeable(panel);
- });
+ if (!gridster) return;
+ gridster.$widgets.each(function (i, el) {
+ const panel = getPanelFor(el);
+ // stop any animations
+ panel.$el.stop();
+ removePanel(panel, true);
+ // not that we will, but lets be safe
+ makePanelSerializeable(panel);
});
+ });
- safeLayout();
- $window.on('resize', safeLayout);
- $scope.$on('ready:vis', safeLayout);
- }
-
- // return the panel object for an element.
- //
- // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- // ALWAYS CALL makePanelSerializeable AFTER YOU ARE DONE WITH IT
- // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- function getPanelFor(el) {
- var $panel = el.jquery ? el : $(el);
- var panel = $panel.data('panel');
+ safeLayout();
+ $window.on('resize', safeLayout);
+ $scope.$on('ready:vis', safeLayout);
+ }
- panel.$el = $panel;
- panel.$scope = $panel.data('$scope');
+ // return the panel object for an element.
+ //
+ // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ // ALWAYS CALL makePanelSerializeable AFTER YOU ARE DONE WITH IT
+ // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ function getPanelFor(el) {
+ const $panel = el.jquery ? el : $(el);
+ const panel = $panel.data('panel');
- return panel;
- }
+ panel.$el = $panel;
+ panel.$scope = $panel.data('$scope');
- // since the $el and $scope are circular structures, they need to be
- // removed from panel before it can be serialized (we also wouldn't
- // want them to show up in the url)
- function makePanelSerializeable(panel) {
- delete panel.$el;
- delete panel.$scope;
- }
+ return panel;
+ }
- // tell gridster to remove the panel, and cleanup our metadata
- function removePanel(panel) {
- // remove from grister 'silently' (don't reorganize after)
- gridster.remove_widget(panel.$el);
+ // since the $el and $scope are circular structures, they need to be
+ // removed from panel before it can be serialized (we also wouldn't
+ // want them to show up in the url)
+ function makePanelSerializeable(panel) {
+ delete panel.$el;
+ delete panel.$scope;
+ }
- // destroy the scope
- panel.$scope.$destroy();
+ // tell gridster to remove the panel, and cleanup our metadata
+ function removePanel(panel, silent) {
+ // remove from gridster 'silently' (don't reorganize after)
+ gridster.remove_widget(panel.$el, silent);
- panel.$el.removeData('panel');
- panel.$el.removeData('$scope');
- }
+ // destroy the scope
+ panel.$scope.$destroy();
- // tell gridster to add the panel, and create additional meatadata like $scope
- function addPanel(panel) {
- _.defaults(panel, {
- size_x: 3,
- size_y: 2
- });
+ panel.$el.removeData('panel');
+ panel.$el.removeData('$scope');
+ }
- // ignore panels that don't have vis id's
- if (!panel.id) {
- // In the interest of backwards compat
- if (panel.visId) {
- panel.id = panel.visId;
- panel.type = 'visualization';
- delete panel.visId;
- } else {
- throw new Error('missing object id on panel');
- }
+ // tell gridster to add the panel, and create additional metadata like $scope
+ function addPanel(panel) {
+ _.defaults(panel, {
+ size_x: 3,
+ size_y: 2
+ });
+
+ // ignore panels that don't have vis id's
+ if (!panel.id) {
+ // In the interest of backwards compat
+ if (panel.visId) {
+ panel.id = panel.visId;
+ panel.type = 'visualization';
+ delete panel.visId;
+ } else {
+ throw new Error('missing object id on panel');
}
+ }
- panel.$scope = $scope.$new();
- panel.$scope.panel = panel;
- panel.$scope.parentUiState = $scope.uiState;
-
- panel.$el = $compile('
')(panel.$scope);
- // stash the panel and it's scope in the element's data
- panel.$el.data('panel', panel);
- panel.$el.data('$scope', panel.$scope);
- }
-
- // ensure that the panel object has the latest size/pos info
- function refreshPanelStats(panel) {
- var data = panel.$el.coords().grid;
- panel.size_x = data.size_x;
- panel.size_y = data.size_y;
- panel.col = data.col;
- panel.row = data.row;
- }
+ // tell gridster to use the widget
+ gridster.add_widget(panel.$el, panel.size_x, panel.size_y, panel.col, panel.row);
- // when gridster tell us it made a change, update each of the panel objects
- function readGridsterChangeHandler(e, ui, $widget) {
- // ensure that our panel objects keep their size in sync
- gridster.$widgets.each(function (i, el) {
- var panel = getPanelFor(el);
- refreshPanelStats(panel);
- panel.$scope.$broadcast('resize');
- makePanelSerializeable(panel);
- $scope.$root.$broadcast('change:vis');
- });
- }
+ // update size/col/etc.
+ refreshPanelStats(panel);
- // calculate the position and sizing of the gridster el, and the columns within it
- // then tell gridster to "reflow" -- which is definitely not supported.
- // we may need to consider using a different library
- function reflowGridster() {
- // https://github.com/gcphost/gridster-responsive/blob/97fe43d4b312b409696b1d702e1afb6fbd3bba71/jquery.gridster.js#L1208-L1235
- var g = gridster;
-
- g.options.widget_margins = [SPACER / 2, SPACER / 2];
- g.options.widget_base_dimensions = [($container.width() - spacerSize) / COLS, 100];
- g.min_widget_width = (g.options.widget_margins[0] * 2) + g.options.widget_base_dimensions[0];
- g.min_widget_height = (g.options.widget_margins[1] * 2) + g.options.widget_base_dimensions[1];
-
- // var serializedGrid = g.serialize();
- g.$widgets.each(function (i, widget) {
- g.resize_widget($(widget));
- });
+ // stash the panel and its scope in the element's data
+ panel.$el.data('panel', panel);
+ panel.$el.data('$scope', panel.$scope);
+ }
- g.generate_grid_and_stylesheet();
- g.generate_stylesheet({ namespace: '.gridster' });
+ // ensure that the panel object has the latest size/pos info
+ function refreshPanelStats(panel) {
+ const data = panel.$el.coords().grid;
+ panel.size_x = data.size_x;
+ panel.size_y = data.size_y;
+ panel.col = data.col;
+ panel.row = data.row;
+ }
- g.get_widgets_from_DOM();
- // We can't call this method if the gridmap is empty. This was found
- // when the user double clicked the "New Dashboard" icon. See
- // https://github.com/elastic/kibana4/issues/390
- if (gridster.gridmap.length > 0) g.set_dom_grid_height();
- g.drag_api.set_limits(COLS * g.min_widget_width);
- }
+ // when gridster tells us it made a change, update each of the panel objects
+ function readGridsterChangeHandler(e, ui, $widget) {
+ // ensure that our panel objects keep their size in sync
+ gridster.$widgets.each(function (i, el) {
+ const panel = getPanelFor(el);
+ refreshPanelStats(panel);
+ panel.$scope.$broadcast('resize');
+ makePanelSerializeable(panel);
+ $scope.$root.$broadcast('change:vis');
+ });
+ }
- function layout() {
- var complete = notify.event('reflow dashboard');
- reflowGridster();
- readGridsterChangeHandler();
- complete();
- }
+ // calculate the position and sizing of the gridster el, and the columns within it
+ // then tell gridster to "reflow" -- which is definitely not supported.
+ // we may need to consider using a different library
+ function reflowGridster() {
+ // https://github.com/gcphost/gridster-responsive/blob/97fe43d4b312b409696b1d702e1afb6fbd3bba71/jquery.gridster.js#L1208-L1235
+ const g = gridster;
+
+ g.options.widget_margins = [SPACER / 2, SPACER / 2];
+ g.options.widget_base_dimensions = [($container.width() - spacerSize) / COLS, 100];
+ g.min_widget_width = (g.options.widget_margins[0] * 2) + g.options.widget_base_dimensions[0];
+ g.min_widget_height = (g.options.widget_margins[1] * 2) + g.options.widget_base_dimensions[1];
+
+ // const serializedGrid = g.serialize();
+ g.$widgets.each(function (i, widget) {
+ g.resize_widget($(widget));
+ });
+
+ g.generate_grid_and_stylesheet();
+ g.generate_stylesheet({ namespace: '.gridster' });
+
+ g.get_widgets_from_DOM();
+ // We can't call this method if the gridmap is empty. This was found
+ // when the user double clicked the "New Dashboard" icon. See
+ // https://github.com/elastic/kibana4/issues/390
+ if (gridster.gridmap.length > 0) g.set_dom_grid_height();
+ g.drag_api.set_limits(COLS * g.min_widget_width);
+ }
- init();
+ function layout() {
+ const complete = notify.event('reflow dashboard');
+ reflowGridster();
+ readGridsterChangeHandler();
+ complete();
}
- };
- });
+ init();
+ }
+ };
});
diff --git a/src/plugins/kibana/public/dashboard/index.html b/src/plugins/kibana/public/dashboard/index.html
index d2a2688c801c3..8bc0f088aecf2 100644
--- a/src/plugins/kibana/public/dashboard/index.html
+++ b/src/plugins/kibana/public/dashboard/index.html
@@ -1,7 +1,13 @@
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
-
Ready to get started?
-
Click the button in the menu bar above to add a visualization to the dashboard. If you haven't setup a visualization yet visit the "Visualize" tab to create your first visualization.
+
Click the Add button in the menu bar above to add a visualization to the dashboard. If you haven't setup a visualization yet visit the "Visualize" tab to create your first visualization.
diff --git a/src/plugins/kibana/public/dashboard/index.js b/src/plugins/kibana/public/dashboard/index.js
index 45e247b498755..e8322ea212c16 100644
--- a/src/plugins/kibana/public/dashboard/index.js
+++ b/src/plugins/kibana/public/dashboard/index.js
@@ -1,251 +1,270 @@
-define(function (require) {
- var _ = require('lodash');
- var $ = require('jquery');
- var angular = require('angular');
- var ConfigTemplate = require('ui/ConfigTemplate');
- var chrome = require('ui/chrome');
-
- require('ui/directives/config');
- require('ui/courier');
- require('ui/config');
- require('ui/notify');
- require('ui/typeahead');
-
- require('plugins/kibana/dashboard/directives/grid');
- require('plugins/kibana/dashboard/components/panel/panel');
- require('plugins/kibana/dashboard/services/saved_dashboards');
- require('plugins/kibana/dashboard/styles/main.less');
-
- require('ui/saved_objects/saved_object_registry').register(require('plugins/kibana/dashboard/services/saved_dashboard_register'));
-
-
- var app = require('ui/modules').get('app/dashboard', [
- 'elasticsearch',
- 'ngRoute',
- 'kibana/courier',
- 'kibana/config',
- 'kibana/notify',
- 'kibana/typeahead'
- ]);
-
- require('ui/routes')
- .when('/dashboard', {
- template: require('plugins/kibana/dashboard/index.html'),
- resolve: {
- dash: function (savedDashboards, config) {
- return savedDashboards.get();
- }
+import _ from 'lodash';
+import $ from 'jquery';
+import angular from 'angular';
+import chrome from 'ui/chrome';
+import 'ui/courier';
+import 'ui/config';
+import 'ui/notify';
+import 'ui/typeahead';
+import 'ui/navbar_extensions';
+import 'ui/share';
+import 'plugins/kibana/dashboard/directives/grid';
+import 'plugins/kibana/dashboard/components/panel/panel';
+import 'plugins/kibana/dashboard/services/saved_dashboards';
+import 'plugins/kibana/dashboard/styles/main.less';
+import FilterBarQueryFilterProvider from 'ui/filter_bar/query_filter';
+import DocTitleProvider from 'ui/doc_title';
+import uiRoutes from 'ui/routes';
+import uiModules from 'ui/modules';
+import indexTemplate from 'plugins/kibana/dashboard/index.html';
+
+
+
+require('ui/saved_objects/saved_object_registry').register(require('plugins/kibana/dashboard/services/saved_dashboard_register'));
+
+
+const app = uiModules.get('app/dashboard', [
+ 'elasticsearch',
+ 'ngRoute',
+ 'kibana/courier',
+ 'kibana/config',
+ 'kibana/notify',
+ 'kibana/typeahead'
+]);
+
+uiRoutes
+.when('/dashboard', {
+ template: indexTemplate,
+ resolve: {
+ dash: function (savedDashboards, config) {
+ return savedDashboards.get();
}
- })
- .when('/dashboard/:id', {
- template: require('plugins/kibana/dashboard/index.html'),
- resolve: {
- dash: function (savedDashboards, Notifier, $route, $location, courier) {
- return savedDashboards.get($route.current.params.id)
- .catch(courier.redirectWhenMissing({
- 'dashboard' : '/dashboard'
- }));
- }
+ }
+})
+.when('/dashboard/:id', {
+ template: indexTemplate,
+ resolve: {
+ dash: function (savedDashboards, Notifier, $route, $location, courier) {
+ return savedDashboards.get($route.current.params.id)
+ .catch(courier.redirectWhenMissing({
+ 'dashboard' : '/dashboard'
+ }));
}
- });
+ }
+});
- app.directive('dashboardApp', function (Notifier, courier, AppState, timefilter, kbnUrl) {
- return {
- controller: function ($scope, $rootScope, $route, $routeParams, $location, Private, getAppState) {
+app.directive('dashboardApp', function (Notifier, courier, AppState, timefilter, kbnUrl) {
+ return {
+ controller: function ($scope, $rootScope, $route, $routeParams, $location, Private, getAppState) {
- var queryFilter = Private(require('ui/filter_bar/query_filter'));
+ const queryFilter = Private(FilterBarQueryFilterProvider);
- var notify = new Notifier({
- location: 'Dashboard'
- });
+ const notify = new Notifier({
+ location: 'Dashboard'
+ });
- var dash = $scope.dash = $route.current.locals.dash;
+ const dash = $scope.dash = $route.current.locals.dash;
- if (dash.timeRestore && dash.timeTo && dash.timeFrom && !getAppState.previouslyStored()) {
- timefilter.time.to = dash.timeTo;
- timefilter.time.from = dash.timeFrom;
+ if (dash.timeRestore && dash.timeTo && dash.timeFrom && !getAppState.previouslyStored()) {
+ timefilter.time.to = dash.timeTo;
+ timefilter.time.from = dash.timeFrom;
+ if (dash.refreshInterval) {
+ timefilter.refreshInterval = dash.refreshInterval;
}
+ }
- $scope.$on('$destroy', dash.destroy);
-
- var matchQueryFilter = function (filter) {
- return filter.query && filter.query.query_string && !filter.meta;
- };
-
- var extractQueryFromFilters = function (filters) {
- var filter = _.find(filters, matchQueryFilter);
- if (filter) return filter.query;
- };
-
- var stateDefaults = {
- title: dash.title,
- panels: dash.panelsJSON ? JSON.parse(dash.panelsJSON) : [],
- options: dash.optionsJSON ? JSON.parse(dash.optionsJSON) : {},
- uiState: dash.uiStateJSON ? JSON.parse(dash.uiStateJSON) : {},
- query: extractQueryFromFilters(dash.searchSource.getOwn('filter')) || {query_string: {query: '*'}},
- filters: _.reject(dash.searchSource.getOwn('filter'), matchQueryFilter),
- };
-
- var $state = $scope.state = new AppState(stateDefaults);
- var $uiState = $scope.uiState = $state.makeStateful('uiState');
-
- $scope.$watchCollection('state.options', function (newVal, oldVal) {
- if (!angular.equals(newVal, oldVal)) $state.save();
- });
- $scope.$watch('state.options.darkTheme', setDarkTheme);
-
- $scope.configTemplate = new ConfigTemplate({
- save: require('plugins/kibana/dashboard/partials/save_dashboard.html'),
- load: require('plugins/kibana/dashboard/partials/load_dashboard.html'),
- share: require('plugins/kibana/dashboard/partials/share.html'),
- pickVis: require('plugins/kibana/dashboard/partials/pick_visualization.html'),
- options: require('plugins/kibana/dashboard/partials/options.html')
- });
-
- $scope.refresh = _.bindKey(courier, 'fetch');
-
- timefilter.enabled = true;
- $scope.timefilter = timefilter;
- $scope.$listen(timefilter, 'fetch', $scope.refresh);
-
- courier.setRootSearchSource(dash.searchSource);
-
- function init() {
- updateQueryOnRootSource();
-
- var docTitle = Private(require('ui/doc_title'));
- if (dash.id) {
- docTitle.change(dash.title);
- }
-
- initPanelIndices();
- $scope.$emit('application.load');
+ $scope.$on('$destroy', dash.destroy);
+
+ const matchQueryFilter = function (filter) {
+ return filter.query && filter.query.query_string && !filter.meta;
+ };
+
+ const extractQueryFromFilters = function (filters) {
+ const filter = _.find(filters, matchQueryFilter);
+ if (filter) return filter.query;
+ };
+
+ const stateDefaults = {
+ title: dash.title,
+ panels: dash.panelsJSON ? JSON.parse(dash.panelsJSON) : [],
+ options: dash.optionsJSON ? JSON.parse(dash.optionsJSON) : {},
+ uiState: dash.uiStateJSON ? JSON.parse(dash.uiStateJSON) : {},
+ query: extractQueryFromFilters(dash.searchSource.getOwn('filter')) || {query_string: {query: '*'}},
+ filters: _.reject(dash.searchSource.getOwn('filter'), matchQueryFilter),
+ };
+
+ const $state = $scope.state = new AppState(stateDefaults);
+ const $uiState = $scope.uiState = $state.makeStateful('uiState');
+
+ $scope.$watchCollection('state.options', function (newVal, oldVal) {
+ if (!angular.equals(newVal, oldVal)) $state.save();
+ });
+ $scope.$watch('state.options.darkTheme', setDarkTheme);
+
+ $scope.topNavMenu = [{
+ key: 'new',
+ description: 'New Dashboard',
+ run: function () { kbnUrl.change('/dashboard', {}); },
+ }, {
+ key: 'add',
+ description: 'Add a panel to the dashboard',
+ template: require('plugins/kibana/dashboard/partials/pick_visualization.html')
+ }, {
+ key: 'save',
+ description: 'Save Dashboard',
+ template: require('plugins/kibana/dashboard/partials/save_dashboard.html')
+ }, {
+ key: 'open',
+ description: 'Load Saved Dashboard',
+ template: require('plugins/kibana/dashboard/partials/load_dashboard.html')
+ }, {
+ key: 'share',
+ description: 'Share Dashboard',
+ template: require('plugins/kibana/dashboard/partials/share.html')
+ }, {
+ key: 'options',
+ description: 'Options',
+ template: require('plugins/kibana/dashboard/partials/options.html')
+ }];
+
+ $scope.refresh = _.bindKey(courier, 'fetch');
+
+ timefilter.enabled = true;
+ $scope.timefilter = timefilter;
+ $scope.$listen(timefilter, 'fetch', $scope.refresh);
+
+ courier.setRootSearchSource(dash.searchSource);
+
+ function init() {
+ updateQueryOnRootSource();
+
+ const docTitle = Private(DocTitleProvider);
+ if (dash.id) {
+ docTitle.change(dash.title);
}
- function initPanelIndices() {
- // find the largest panelIndex in all the panels
- var maxIndex = getMaxPanelIndex();
-
- // ensure that all panels have a panelIndex
- $scope.state.panels.forEach(function (panel) {
- if (!panel.panelIndex) {
- panel.panelIndex = maxIndex++;
- }
- });
- }
+ initPanelIndices();
+ $scope.$emit('application.load');
+ }
- function getMaxPanelIndex() {
- var index = $scope.state.panels.reduce(function (idx, panel) {
- // if panel is missing an index, add one and increment the index
- return Math.max(idx, panel.panelIndex || idx);
- }, 0);
- return ++index;
- }
+ function initPanelIndices() {
+ // find the largest panelIndex in all the panels
+ let maxIndex = getMaxPanelIndex();
- function updateQueryOnRootSource() {
- var filters = queryFilter.getFilters();
- if ($state.query) {
- dash.searchSource.set('filter', _.union(filters, [{
- query: $state.query
- }]));
- } else {
- dash.searchSource.set('filter', filters);
+ // ensure that all panels have a panelIndex
+ $scope.state.panels.forEach(function (panel) {
+ if (!panel.panelIndex) {
+ panel.panelIndex = maxIndex++;
}
- }
-
- function setDarkTheme(enabled) {
- var theme = Boolean(enabled) ? 'theme-dark' : 'theme-light';
- chrome.removeApplicationClass(['theme-dark', 'theme-light']);
- chrome.addApplicationClass(theme);
- }
-
- // update root source when filters update
- $scope.$listen(queryFilter, 'update', function () {
- updateQueryOnRootSource();
- $state.save();
});
+ }
- // update data when filters fire fetch event
- $scope.$listen(queryFilter, 'fetch', $scope.refresh);
-
- $scope.newDashboard = function () {
- kbnUrl.change('/dashboard', {});
- };
+ function getMaxPanelIndex() {
+ let index = $scope.state.panels.reduce(function (idx, panel) {
+ // if panel is missing an index, add one and increment the index
+ return Math.max(idx, panel.panelIndex || idx);
+ }, 0);
+ return ++index;
+ }
- $scope.filterResults = function () {
- updateQueryOnRootSource();
- $state.save();
- $scope.refresh();
- };
+ function updateQueryOnRootSource() {
+ const filters = queryFilter.getFilters();
+ if ($state.query) {
+ dash.searchSource.set('filter', _.union(filters, [{
+ query: $state.query
+ }]));
+ } else {
+ dash.searchSource.set('filter', filters);
+ }
+ }
- $scope.save = function () {
- $state.title = dash.id = dash.title;
- $state.save();
+ function setDarkTheme(enabled) {
+ const theme = Boolean(enabled) ? 'theme-dark' : 'theme-light';
+ chrome.removeApplicationClass(['theme-dark', 'theme-light']);
+ chrome.addApplicationClass(theme);
+ }
- dash.panelsJSON = angular.toJson($state.panels);
- dash.uiStateJSON = angular.toJson($uiState.getChanges());
- dash.timeFrom = dash.timeRestore ? timefilter.time.from : undefined;
- dash.timeTo = dash.timeRestore ? timefilter.time.to : undefined;
- dash.optionsJSON = angular.toJson($state.options);
-
- dash.save()
- .then(function (id) {
- $scope.configTemplate.close('save');
- if (id) {
- notify.info('Saved Dashboard as "' + dash.title + '"');
- if (dash.id !== $routeParams.id) {
- kbnUrl.change('/dashboard/{{id}}', {id: dash.id});
- }
+ // update root source when filters update
+ $scope.$listen(queryFilter, 'update', function () {
+ updateQueryOnRootSource();
+ $state.save();
+ });
+
+ // update data when filters fire fetch event
+ $scope.$listen(queryFilter, 'fetch', $scope.refresh);
+
+ $scope.newDashboard = function () {
+ kbnUrl.change('/dashboard', {});
+ };
+
+ $scope.filterResults = function () {
+ updateQueryOnRootSource();
+ $state.save();
+ $scope.refresh();
+ };
+
+ $scope.save = function () {
+ $state.title = dash.id = dash.title;
+ $state.save();
+
+ const timeRestoreObj = _.pick(timefilter.refreshInterval, ['display', 'pause', 'section', 'value']);
+ dash.panelsJSON = angular.toJson($state.panels);
+ dash.uiStateJSON = angular.toJson($uiState.getChanges());
+ dash.timeFrom = dash.timeRestore ? timefilter.time.from : undefined;
+ dash.timeTo = dash.timeRestore ? timefilter.time.to : undefined;
+ dash.refreshInterval = dash.timeRestore ? timeRestoreObj : undefined;
+ dash.optionsJSON = angular.toJson($state.options);
+
+ dash.save()
+ .then(function (id) {
+ $scope.kbnTopNav.close('save');
+ if (id) {
+ notify.info('Saved Dashboard as "' + dash.title + '"');
+ if (dash.id !== $routeParams.id) {
+ kbnUrl.change('/dashboard/{{id}}', {id: dash.id});
}
- })
- .catch(notify.fatal);
- };
-
- var pendingVis = _.size($state.panels);
- $scope.$on('ready:vis', function () {
- if (pendingVis) pendingVis--;
- if (pendingVis === 0) {
- $state.save();
- $scope.refresh();
}
- });
-
- // listen for notifications from the grid component that changes have
- // been made, rather than watching the panels deeply
- $scope.$on('change:vis', function () {
+ })
+ .catch(notify.fatal);
+ };
+
+ let pendingVis = _.size($state.panels);
+ $scope.$on('ready:vis', function () {
+ if (pendingVis) pendingVis--;
+ if (pendingVis === 0) {
$state.save();
- });
-
- // called by the saved-object-finder when a user clicks a vis
- $scope.addVis = function (hit) {
- pendingVis++;
- $state.panels.push({ id: hit.id, type: 'visualization', panelIndex: getMaxPanelIndex() });
- };
-
- $scope.addSearch = function (hit) {
- pendingVis++;
- $state.panels.push({ id: hit.id, type: 'search', panelIndex: getMaxPanelIndex() });
- };
-
- // Setup configurable values for config directive, after objects are initialized
- $scope.opts = {
- dashboard: dash,
- ui: $state.options,
- save: $scope.save,
- addVis: $scope.addVis,
- addSearch: $scope.addSearch,
- shareData: function () {
- return {
- link: $location.absUrl(),
- // This sucks, but seems like the cleanest way. Uhg.
- embed: ''
- };
- }
- };
-
- init();
- }
- };
- });
+ $scope.refresh();
+ }
+ });
+
+ // listen for notifications from the grid component that changes have
+ // been made, rather than watching the panels deeply
+ $scope.$on('change:vis', function () {
+ $state.save();
+ });
+
+ // called by the saved-object-finder when a user clicks a vis
+ $scope.addVis = function (hit) {
+ pendingVis++;
+ $state.panels.push({ id: hit.id, type: 'visualization', panelIndex: getMaxPanelIndex() });
+ };
+
+ $scope.addSearch = function (hit) {
+ pendingVis++;
+ $state.panels.push({ id: hit.id, type: 'search', panelIndex: getMaxPanelIndex() });
+ };
+
+ // Setup configurable values for config directive, after objects are initialized
+ $scope.opts = {
+ dashboard: dash,
+ ui: $state.options,
+ save: $scope.save,
+ addVis: $scope.addVis,
+ addSearch: $scope.addSearch,
+ timefilter: $scope.timefilter
+ };
+
+ init();
+ }
+ };
});
diff --git a/src/plugins/kibana/public/dashboard/partials/share.html b/src/plugins/kibana/public/dashboard/partials/share.html
index bf34366604db1..046acbb5c95b8 100644
--- a/src/plugins/kibana/public/dashboard/partials/share.html
+++ b/src/plugins/kibana/public/dashboard/partials/share.html
@@ -1,21 +1,4 @@
-
\ No newline at end of file
+
+
diff --git a/src/plugins/kibana/public/dashboard/services/_saved_dashboard.js b/src/plugins/kibana/public/dashboard/services/_saved_dashboard.js
index d7115a9bcfca4..5e0ef2f2cb9c0 100644
--- a/src/plugins/kibana/public/dashboard/services/_saved_dashboard.js
+++ b/src/plugins/kibana/public/dashboard/services/_saved_dashboard.js
@@ -1,65 +1,74 @@
-define(function (require) {
- var module = require('ui/modules').get('app/dashboard');
- var angular = require('angular');
- var _ = require('lodash');
- var moment = require('moment');
+import angular from 'angular';
+import _ from 'lodash';
+import moment from 'moment';
+import uiModules from 'ui/modules';
+const module = uiModules.get('app/dashboard');
- // Used only by the savedDashboards service, usually no reason to change this
- module.factory('SavedDashboard', function (courier, config) {
- // SavedDashboard constructor. Usually you'd interact with an instance of this.
- // ID is option, without it one will be generated on save.
- _.class(SavedDashboard).inherits(courier.SavedObject);
- function SavedDashboard(id) {
- // Gives our SavedDashboard the properties of a SavedObject
- SavedDashboard.Super.call(this, {
- type: SavedDashboard.type,
- mapping: SavedDashboard.mapping,
- searchSource: SavedDashboard.searchsource,
+// Used only by the savedDashboards service, usually no reason to change this
+module.factory('SavedDashboard', function (courier, config) {
+ // SavedDashboard constructor. Usually you'd interact with an instance of this.
+ // ID is optional; without it one will be generated on save.
+ _.class(SavedDashboard).inherits(courier.SavedObject);
+ function SavedDashboard(id) {
+ // Gives our SavedDashboard the properties of a SavedObject
+ SavedDashboard.Super.call(this, {
+ type: SavedDashboard.type,
+ mapping: SavedDashboard.mapping,
+ searchSource: SavedDashboard.searchsource,
- // if this is null/undefined then the SavedObject will be assigned the defaults
- id: id,
+ // if this is null/undefined then the SavedObject will be assigned the defaults
+ id: id,
- // default values that will get assigned if the doc is new
- defaults: {
- title: 'New Dashboard',
- hits: 0,
- description: '',
- panelsJSON: '[]',
- optionsJSON: angular.toJson({
- darkTheme: config.get('dashboard:defaultDarkTheme')
- }),
- uiStateJSON: '{}',
- version: 1,
- timeRestore: false,
- timeTo: undefined,
- timeFrom: undefined,
- },
+ // default values that will get assigned if the doc is new
+ defaults: {
+ title: 'New Dashboard',
+ hits: 0,
+ description: '',
+ panelsJSON: '[]',
+ optionsJSON: angular.toJson({
+ darkTheme: config.get('dashboard:defaultDarkTheme')
+ }),
+ uiStateJSON: '{}',
+ version: 1,
+ timeRestore: false,
+ timeTo: undefined,
+ timeFrom: undefined,
+ refreshInterval: undefined
+ },
- // if an indexPattern was saved with the searchsource of a SavedDashboard
- // object, clear it. It was a mistake
- clearSavedIndexPattern: true
- });
- }
+ // if an indexPattern was saved with the searchsource of a SavedDashboard
+ // object, clear it. It was a mistake
+ clearSavedIndexPattern: true
+ });
+ }
- // save these objects with the 'dashboard' type
- SavedDashboard.type = 'dashboard';
+ // save these objects with the 'dashboard' type
+ SavedDashboard.type = 'dashboard';
- // if type:dashboard has no mapping, we push this mapping into ES
- SavedDashboard.mapping = {
- title: 'string',
- hits: 'integer',
- description: 'string',
- panelsJSON: 'string',
- optionsJSON: 'string',
- uiStateJSON: 'string',
- version: 'integer',
- timeRestore: 'boolean',
- timeTo: 'string',
- timeFrom: 'string',
- };
+ // if type:dashboard has no mapping, we push this mapping into ES
+ SavedDashboard.mapping = {
+ title: 'string',
+ hits: 'integer',
+ description: 'string',
+ panelsJSON: 'string',
+ optionsJSON: 'string',
+ uiStateJSON: 'string',
+ version: 'integer',
+ timeRestore: 'boolean',
+ timeTo: 'string',
+ timeFrom: 'string',
+ refreshInterval: {
+ type: 'object',
+ properties: {
+ display: {type: 'string'},
+ pause: { type: 'boolean'},
+ section: { type: 'integer'},
+ value: { type: 'integer'}
+ }
+ }
+ };
- SavedDashboard.searchsource = true;
+ SavedDashboard.searchsource = true;
- return SavedDashboard;
- });
+ return SavedDashboard;
});
diff --git a/src/plugins/kibana/public/dashboard/services/saved_dashboard_register.js b/src/plugins/kibana/public/dashboard/services/saved_dashboard_register.js
index df54e6c3abdbc..207a3298b9c69 100644
--- a/src/plugins/kibana/public/dashboard/services/saved_dashboard_register.js
+++ b/src/plugins/kibana/public/dashboard/services/saved_dashboard_register.js
@@ -1,5 +1,3 @@
-define(function (require) {
- return function savedDashboardFn(savedDashboards) {
- return savedDashboards;
- };
-});
+export default function savedDashboardFn(savedDashboards) {
+ return savedDashboards;
+};
diff --git a/src/plugins/kibana/public/dashboard/services/saved_dashboards.js b/src/plugins/kibana/public/dashboard/services/saved_dashboards.js
index a9a2f74b558e4..bca33d1a2f573 100644
--- a/src/plugins/kibana/public/dashboard/services/saved_dashboards.js
+++ b/src/plugins/kibana/public/dashboard/services/saved_dashboards.js
@@ -1,80 +1,94 @@
-define(function (require) {
- var module = require('ui/modules').get('app/dashboard');
- var _ = require('lodash');
- // bring in the factory
- require('plugins/kibana/dashboard/services/_saved_dashboard');
+import _ from 'lodash';
+import Scanner from 'ui/utils/scanner';
+import 'plugins/kibana/dashboard/services/_saved_dashboard';
+import uiModules from 'ui/modules';
+const module = uiModules.get('app/dashboard');
+// bring in the factory
- // Register this service with the saved object registry so it can be
- // edited by the object editor.
- require('plugins/kibana/settings/saved_object_registry').register({
- service: 'savedDashboards',
- title: 'dashboards'
+
+// Register this service with the saved object registry so it can be
+// edited by the object editor.
+require('plugins/kibana/settings/saved_object_registry').register({
+ service: 'savedDashboards',
+ title: 'dashboards'
+});
+
+// This is the only thing that gets injected into controllers
+module.service('savedDashboards', function (Promise, SavedDashboard, kbnIndex, es, kbnUrl) {
+ const scanner = new Scanner(es, {
+ index: kbnIndex,
+ type: 'dashboard'
});
- // This is the only thing that gets injected into controllers
- module.service('savedDashboards', function (Promise, SavedDashboard, kbnIndex, es, kbnUrl) {
- this.type = SavedDashboard.type;
- this.Class = SavedDashboard;
+ this.type = SavedDashboard.type;
+ this.Class = SavedDashboard;
- this.loaderProperties = {
- name: 'dashboards',
- noun: 'Dashboard',
- nouns: 'dashboards'
- };
+ this.loaderProperties = {
+ name: 'dashboards',
+ noun: 'Dashboard',
+ nouns: 'dashboards'
+ };
- // Returns a single dashboard by ID, should be the name of the dashboard
- this.get = function (id) {
- // Returns a promise that contains a dashboard which is a subclass of docSource
- return (new SavedDashboard(id)).init();
- };
+ // Returns a single dashboard by ID, should be the name of the dashboard
+ this.get = function (id) {
+ // Returns a promise that contains a dashboard which is a subclass of docSource
+ return (new SavedDashboard(id)).init();
+ };
- this.urlFor = function (id) {
- return kbnUrl.eval('#/dashboard/{{id}}', {id: id});
- };
+ this.urlFor = function (id) {
+ return kbnUrl.eval('#/dashboard/{{id}}', {id: id});
+ };
- this.delete = function (ids) {
- ids = !_.isArray(ids) ? [ids] : ids;
- return Promise.map(ids, function (id) {
- return (new SavedDashboard(id)).delete();
- });
- };
+ this.delete = function (ids) {
+ ids = !_.isArray(ids) ? [ids] : ids;
+ return Promise.map(ids, function (id) {
+ return (new SavedDashboard(id)).delete();
+ });
+ };
- this.find = function (searchString, size = 100) {
- var self = this;
- var body;
- if (searchString) {
- body = {
- query: {
- simple_query_string: {
- query: searchString + '*',
- fields: ['title^3', 'description'],
- default_operator: 'AND'
- }
+ this.scanAll = function (queryString, pageSize = 1000) {
+ return scanner.scanAndMap(queryString, {
+ pageSize,
+ docCount: Infinity
+ }, (hit) => this.mapHits(hit));
+ };
+
+ this.mapHits = function (hit) {
+ const source = hit._source;
+ source.id = hit._id;
+ source.url = this.urlFor(hit._id);
+ return source;
+ };
+
+ this.find = function (searchString, size = 100) {
+ let body;
+ if (searchString) {
+ body = {
+ query: {
+ simple_query_string: {
+ query: searchString + '*',
+ fields: ['title^3', 'description'],
+ default_operator: 'AND'
}
- };
- } else {
- body = { query: {match_all: {}}};
- }
+ }
+ };
+ } else {
+ body = { query: {match_all: {}}};
+ }
- return es.search({
- index: kbnIndex,
- type: 'dashboard',
- body: body,
- size: size
- })
- .then(function (resp) {
- return {
- total: resp.hits.total,
- hits: resp.hits.hits.map(function (hit) {
- var source = hit._source;
- source.id = hit._id;
- source.url = self.urlFor(hit._id);
- return source;
- })
- };
- });
- };
- });
+ return es.search({
+ index: kbnIndex,
+ type: 'dashboard',
+ body: body,
+ size: size
+ })
+ .then((resp) => {
+ return {
+ total: resp.hits.total,
+ hits: resp.hits.hits.map((hit) => this.mapHits(hit))
+ };
+ });
+ };
});
diff --git a/src/plugins/kibana/public/dashboard/styles/main.less b/src/plugins/kibana/public/dashboard/styles/main.less
index c55428e6e0091..a0cccdd1e6ddc 100644
--- a/src/plugins/kibana/public/dashboard/styles/main.less
+++ b/src/plugins/kibana/public/dashboard/styles/main.less
@@ -33,6 +33,24 @@ dashboard-grid {
width: 25px;
}
+ .gs-w {
+ border: 2px dashed transparent;
+
+ &:hover {
+ border-color: @kibanaGray4;
+
+ dashboard-panel {
+ .visualize-show-spy {
+ visibility: visible;
+ }
+ .panel .panel-heading .btn-group {
+ display: block !important;
+ }
+ }
+
+ }
+ }
+
i.remove {
cursor: pointer;
}
@@ -55,15 +73,23 @@ dashboard-grid {
display: flex;
flex-direction: column;
justify-content: flex-start;
+ border: 0 solid transparent;
.panel-heading {
+ padding: 0px 0px 0px 5px;
flex: 0 0 auto;
white-space: nowrap;
display: flex;
border-top-right-radius: 0;
border-top-left-radius: 0;
+ background-color: @white;
+ border: none;
- div.btn-group {
+ .btn-group {
+ a {
+ color: inherit;
+ }
+ display: none;
white-space: nowrap;
flex: 0 0 auto;
}
@@ -85,6 +111,10 @@ dashboard-grid {
}
}
+ .panel-move:hover {
+ cursor: move;
+ }
+
a {
color: @dashboard-panel-heading-link-color;
border: none;
@@ -96,6 +126,10 @@ dashboard-grid {
}
}
+ .visualize-show-spy {
+ visibility: hidden;
+ }
+
.load-error {
text-align: center;
font-size: 1em;
@@ -119,10 +153,6 @@ dashboard-grid {
}
}
-.dashboard-panel-picker > li.list-group-item {
- border-top: 0px;
-}
-
-.dashboard-load {
- margin: 10px;
+.dashboard-panel-picker > .list-group-item {
+ border-top: 0;
}
diff --git a/src/plugins/kibana/public/discover/__tests__/directives/discover_field.js b/src/plugins/kibana/public/discover/__tests__/directives/discover_field.js
index 48ce5a5e3f3de..7515cf94fc842 100644
--- a/src/plugins/kibana/public/discover/__tests__/directives/discover_field.js
+++ b/src/plugins/kibana/public/discover/__tests__/directives/discover_field.js
@@ -1,24 +1,25 @@
-var angular = require('angular');
-var $ = require('jquery');
-var _ = require('lodash');
-var sinon = require('auto-release-sinon');
-var ngMock = require('ngMock');
-var expect = require('expect.js');
+import angular from 'angular';
+import _ from 'lodash';
+import sinon from 'auto-release-sinon';
+import ngMock from 'ng_mock';
+import expect from 'expect.js';
+import $ from 'jquery';
+import 'ui/private';
+import 'plugins/kibana/discover/components/field_chooser/discover_field';
+import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
// Load the kibana app dependencies.
-require('ui/private');
-require('plugins/kibana/discover/components/field_chooser/discover_field');
describe('discoverField', function () {
- var $scope;
- var indexPattern;
- var $elem;
+ let $scope;
+ let indexPattern;
+ let $elem;
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private, $rootScope, $compile) {
$elem = angular.element('');
- indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
+ indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
_.assign($rootScope, {
field: indexPattern.fields.byName.extension,
diff --git a/src/plugins/kibana/public/discover/__tests__/directives/field_calculator.js b/src/plugins/kibana/public/discover/__tests__/directives/field_calculator.js
index e8b4a5c07aaae..e248be72b4f2c 100644
--- a/src/plugins/kibana/public/discover/__tests__/directives/field_calculator.js
+++ b/src/plugins/kibana/public/discover/__tests__/directives/field_calculator.js
@@ -1,30 +1,31 @@
-var _ = require('lodash');
-var ngMock = require('ngMock');
-var fieldCalculator = require('plugins/kibana/discover/components/field_chooser/lib/field_calculator');
-var expect = require('expect.js');
+import _ from 'lodash';
+import ngMock from 'ng_mock';
+import fieldCalculator from 'plugins/kibana/discover/components/field_chooser/lib/field_calculator';
+import expect from 'expect.js';
+import 'ui/private';
+import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
// Load the kibana app dependencies.
-require('ui/private');
-var indexPattern;
+let indexPattern;
describe('fieldCalculator', function () {
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
- indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
+ indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
}));
it('should have a _countMissing that counts nulls & undefineds in an array', function () {
- var values = [['foo', 'bar'], 'foo', 'foo', undefined, ['foo', 'bar'], 'bar', 'baz', null, null, null, 'foo', undefined];
+ const values = [['foo', 'bar'], 'foo', 'foo', undefined, ['foo', 'bar'], 'bar', 'baz', null, null, null, 'foo', undefined];
expect(fieldCalculator._countMissing(values)).to.be(5);
});
describe('_groupValues', function () {
- var groups;
- var params;
- var values;
+ let groups;
+ let params;
+ let values;
beforeEach(function () {
values = [['foo', 'bar'], 'foo', 'foo', undefined, ['foo', 'bar'], 'bar', 'baz', null, null, null, 'foo', undefined];
params = {};
@@ -78,21 +79,21 @@ describe('fieldCalculator', function () {
});
describe('getFieldValues', function () {
- var hits;
+ let hits;
beforeEach(function () {
hits = _.each(require('fixtures/real_hits.js'), indexPattern.flattenHit);
});
it('Should return an array of values for _source fields', function () {
- var extensions = fieldCalculator.getFieldValues(hits, indexPattern.fields.byName.extension);
+ const extensions = fieldCalculator.getFieldValues(hits, indexPattern.fields.byName.extension);
expect(extensions).to.be.an(Array);
expect(_.filter(extensions, function (v) { return v === 'html'; }).length).to.be(8);
expect(_.uniq(_.clone(extensions)).sort()).to.eql(['gif', 'html', 'php', 'png']);
});
it('Should return an array of values for core meta fields', function () {
- var types = fieldCalculator.getFieldValues(hits, indexPattern.fields.byName._type);
+ const types = fieldCalculator.getFieldValues(hits, indexPattern.fields.byName._type);
expect(types).to.be.an(Array);
expect(_.filter(types, function (v) { return v === 'apache'; }).length).to.be(18);
expect(_.uniq(_.clone(types)).sort()).to.eql(['apache', 'nginx']);
@@ -101,7 +102,7 @@ describe('fieldCalculator', function () {
describe('getFieldValueCounts', function () {
- var params;
+ let params;
beforeEach(function () {
params = {
hits: require('fixtures/real_hits.js'),
@@ -111,7 +112,7 @@ describe('fieldCalculator', function () {
});
it('counts the top 3 values', function () {
- var extensions = fieldCalculator.getFieldValueCounts(params);
+ const extensions = fieldCalculator.getFieldValueCounts(params);
expect(extensions).to.be.an(Object);
expect(extensions.buckets).to.be.an(Array);
expect(extensions.buckets.length).to.be(3);
diff --git a/src/plugins/kibana/public/discover/__tests__/directives/field_chooser.js b/src/plugins/kibana/public/discover/__tests__/directives/field_chooser.js
index d1e4328c471a1..8f555b96b90d2 100644
--- a/src/plugins/kibana/public/discover/__tests__/directives/field_chooser.js
+++ b/src/plugins/kibana/public/discover/__tests__/directives/field_chooser.js
@@ -1,46 +1,49 @@
-
-var angular = require('angular');
-var ngMock = require('ngMock');
-var $ = require('jquery');
-var _ = require('lodash');
-var sinon = require('auto-release-sinon');
-var expect = require('expect.js');
+import angular from 'angular';
+import ngMock from 'ng_mock';
+import _ from 'lodash';
+import sinon from 'auto-release-sinon';
+import expect from 'expect.js';
+import $ from 'jquery';
+import 'ui/private';
+import 'plugins/kibana/discover/components/field_chooser/field_chooser';
+import FixturesHitsProvider from 'fixtures/hits';
+import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
// Load the kibana app dependencies.
-require('ui/private');
-require('plugins/kibana/discover/components/field_chooser/field_chooser');
-var $parentScope;
-var $scope;
-var config;
-var hits;
-var indexPattern;
-var indexPatternList;
+let $parentScope;
+let $scope;
+let config;
+let hits;
+let indexPattern;
+let indexPatternList;
+let shortDotsValue;
// Sets up the directive, take an element, and a list of properties to attach to the parent scope.
-var init = function ($elem, props) {
+const init = function ($elem, props) {
ngMock.inject(function ($rootScope, $compile, $timeout, _config_) {
+ shortDotsValue = _config_.get('shortDots:enable');
config = _config_;
+ config.set('shortDots:enable', false);
$parentScope = $rootScope;
_.assign($parentScope, props);
$compile($elem)($parentScope);
// Required for test to run solo. Sigh
- $timeout(function () {
- $elem.scope().$digest();
- }, 0);
+ $timeout(() => $elem.scope().$digest(), 0);
$scope = $elem.isolateScope();
});
};
-var destroy = function () {
+const destroy = function () {
$scope.$destroy();
$parentScope.$destroy();
+ config.set('shortDots:enable', shortDotsValue);
};
describe('discover field chooser directives', function () {
- var $elem = angular.element(
+ const $elem = angular.element(
' destroy());
- var getSections = function (ctx) {
+ const getSections = function (ctx) {
return {
selected: $('.discover-selected-fields', ctx),
popular: $('.discover-popular-fields', ctx),
@@ -99,29 +100,38 @@ describe('discover field chooser directives', function () {
describe('Field listing', function () {
it('should have Selected Fields, Fields and Popular Fields sections', function (done) {
- var headers = $elem.find('.sidebar-list-header');
+ const headers = $elem.find('.sidebar-list-header');
expect(headers.length).to.be(3);
done();
});
it('should have 2 popular fields, 1 unpopular field and no selected fields', function (done) {
- var section = getSections($elem);
+ const section = getSections($elem);
+ const popular = find('popular');
+ const unpopular = find('unpopular');
expect(section.selected.find('li').length).to.be(0);
- expect(section.popular.text()).to.contain('ssl');
- expect(section.popular.text()).to.contain('@timestamp');
- expect(section.popular.text()).to.not.contain('ip\n');
+ expect(popular).to.contain('ssl');
+ expect(popular).to.contain('@timestamp');
+ expect(popular).to.not.contain('ip\n');
- expect(section.unpopular.text()).to.contain('extension');
- expect(section.unpopular.text()).to.contain('machine.os');
- expect(section.unpopular.text()).to.not.contain('ssl');
+ expect(unpopular).to.contain('extension');
+ expect(unpopular).to.contain('machine.os');
+ expect(unpopular).to.not.contain('ssl');
done();
+
+ function find(popularity) {
+ return section[popularity]
+ .find('.discover-field-name')
+ .map((i, el) => $(el).text())
+ .toArray();
+ }
});
it('should show the popular fields header if there are popular fields', function (done) {
- var section = getSections($elem);
+ const section = getSections($elem);
expect(section.popular.hasClass('ng-hide')).to.be(false);
expect(section.popular.find('li:not(.sidebar-list-header)').length).to.be.above(0);
done();
@@ -141,7 +151,7 @@ describe('discover field chooser directives', function () {
indexPattern: indexPattern
});
- var section = getSections($elem);
+ const section = getSections($elem);
$scope.$digest();
expect(section.popular.hasClass('ng-hide')).to.be(true);
@@ -150,7 +160,7 @@ describe('discover field chooser directives', function () {
});
it('should move the field into selected when it is added to the columns array', function (done) {
- var section = getSections($elem);
+ const section = getSections($elem);
$scope.columns.push('bytes');
$scope.$digest();
@@ -169,7 +179,7 @@ describe('discover field chooser directives', function () {
});
describe('details processing', function () {
- var field;
+ let field;
function getField() { return _.find($scope.fields, { name: 'bytes' }); }
beforeEach(function () {
diff --git a/src/plugins/kibana/public/discover/__tests__/hit_sort_fn.js b/src/plugins/kibana/public/discover/__tests__/hit_sort_fn.js
index 3380732b02b59..197f499d0720c 100644
--- a/src/plugins/kibana/public/discover/__tests__/hit_sort_fn.js
+++ b/src/plugins/kibana/public/discover/__tests__/hit_sort_fn.js
@@ -1,28 +1,29 @@
-var _ = require('lodash');
-var ngMock = require('ngMock');
-var expect = require('expect.js');
+import _ from 'lodash';
+import ngMock from 'ng_mock';
+import expect from 'expect.js';
+import PluginsKibanaDiscoverHitSortFnProvider from 'plugins/kibana/discover/_hit_sort_fn';
describe('hit sort function', function () {
- var createHitSortFn;
+ let createHitSortFn;
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
- createHitSortFn = Private(require('plugins/kibana/discover/_hit_sort_fn'));
+ createHitSortFn = Private(PluginsKibanaDiscoverHitSortFnProvider);
}));
- var runSortTest = function (dir, sortOpts) {
- var groupSize = _.random(10, 30);
- var total = sortOpts.length * groupSize;
+ const runSortTest = function (dir, sortOpts) {
+ const groupSize = _.random(10, 30);
+ const total = sortOpts.length * groupSize;
sortOpts = sortOpts.map(function (opt) {
if (_.isArray(opt)) return opt;
else return [opt];
});
- var sortOptLength = sortOpts.length;
+ const sortOptLength = sortOpts.length;
- var hits = _.times(total, function (i) {
+ const hits = _.times(total, function (i) {
return {
_source: {},
sort: sortOpts[i % sortOptLength]
@@ -31,7 +32,7 @@ describe('hit sort function', function () {
hits.sort(createHitSortFn(dir))
.forEach(function (hit, i) {
- var group = Math.floor(i / groupSize);
+ const group = Math.floor(i / groupSize);
expect(hit.sort).to.eql(sortOpts[group]);
});
};
diff --git a/src/plugins/kibana/public/discover/_hit_sort_fn.js b/src/plugins/kibana/public/discover/_hit_sort_fn.js
index 7699396b4c297..83cc68c160504 100644
--- a/src/plugins/kibana/public/discover/_hit_sort_fn.js
+++ b/src/plugins/kibana/public/discover/_hit_sort_fn.js
@@ -29,18 +29,18 @@ define(function () {
* @return {[type]} [description]
*/
return function createHitSortFn(direction) {
- var descending = (direction === 'desc');
+ const descending = (direction === 'desc');
return function sortHits(hitA, hitB) {
- var bBelowa = null;
+ let bBelowa = null;
- var aSorts = hitA.sort || [];
- var bSorts = hitB.sort || [];
+ const aSorts = hitA.sort || [];
+ const bSorts = hitB.sort || [];
// walk each sort value, and compair until one is different
- for (var i = 0; i < bSorts.length; i++) {
- var a = aSorts[i];
- var b = bSorts[i];
+ for (let i = 0; i < bSorts.length; i++) {
+ const a = aSorts[i];
+ const b = bSorts[i];
if (a == null || b > a) {
bBelowa = !descending;
diff --git a/src/plugins/kibana/public/discover/components/field_chooser/discover_field.js b/src/plugins/kibana/public/discover/components/field_chooser/discover_field.js
index 2a073ceaafda7..62358fa61a4b3 100644
--- a/src/plugins/kibana/public/discover/components/field_chooser/discover_field.js
+++ b/src/plugins/kibana/public/discover/components/field_chooser/discover_field.js
@@ -1,94 +1,93 @@
-define(function (require) {
- var $ = require('jquery');
- var app = require('ui/modules').get('apps/discover');
- var html = require('plugins/kibana/discover/components/field_chooser/discover_field.html');
- var _ = require('lodash');
-
- require('ui/directives/css_truncate');
- require('ui/directives/field_name');
-
-
- app.directive('discoverField', function ($compile) {
- return {
- restrict: 'E',
- template: html,
- replace: true,
- link: function ($scope, $elem) {
- var detailsElem;
- var detailScope = $scope.$new();
-
- var detailsHtml = require('plugins/kibana/discover/components/field_chooser/lib/detail_views/string.html');
-
- var init = function () {
- if ($scope.field.details) {
- $scope.toggleDetails($scope.field, true);
+import $ from 'jquery';
+import html from 'plugins/kibana/discover/components/field_chooser/discover_field.html';
+import _ from 'lodash';
+import 'ui/directives/css_truncate';
+import 'ui/directives/field_name';
+import detailsHtml from 'plugins/kibana/discover/components/field_chooser/lib/detail_views/string.html';
+import uiModules from 'ui/modules';
+const app = uiModules.get('apps/discover');
+
+
+
+app.directive('discoverField', function ($compile) {
+ return {
+ restrict: 'E',
+ template: html,
+ replace: true,
+ link: function ($scope, $elem) {
+ let detailsElem;
+ let detailScope = $scope.$new();
+
+
+ const init = function () {
+ if ($scope.field.details) {
+ $scope.toggleDetails($scope.field, true);
+ }
+ };
+
+ const getWarnings = function (field) {
+ let warnings = [];
+
+ if (!field.scripted) {
+ if (!field.doc_values && field.type !== 'boolean' && !(field.analyzed && field.type === 'string')) {
+ warnings.push('Doc values are not enabled on this field. This may lead to excess heap consumption when visualizing.');
}
- };
- var getWarnings = function (field) {
- var warnings = [];
-
- if (!field.scripted) {
- if (!field.doc_values && field.type !== 'boolean' && !(field.analyzed && field.type === 'string')) {
- warnings.push('Doc values are not enabled on this field. This may lead to excess heap consumption when visualizing.');
- }
-
- if (field.analyzed && field.type === 'string') {
- warnings.push('This is an analyzed string field.' +
- ' Analyzed strings are highly unique and can use a lot of memory to visualize.' +
- ' Values such as foo-bar will be broken into foo and bar.');
- }
-
- if (!field.indexed) {
- warnings.push('This field is not indexed and can not be visualized.');
- }
+ if (field.analyzed && field.type === 'string') {
+ warnings.push('This is an analyzed string field.' +
+ ' Analyzed strings are highly unique and can use a lot of memory to visualize.' +
+ ' Values such as foo-bar will be broken into foo and bar.');
}
-
- if (field.scripted) {
- warnings.push('Scripted fields can take a long time to execute.');
+ if (!field.indexed) {
+ warnings.push('This field is not indexed and might not be usable in visualizations.');
}
-
- if (warnings.length > 1) {
- warnings = warnings.map(function (warning, i) {
- return (i > 0 ? '\n' : '') + (i + 1) + ' - ' + warning;
- });
- }
-
- return warnings;
-
- };
-
- $scope.toggleDisplay = function (field) {
- // inheritted param to fieldChooser
- $scope.toggle(field.name);
- if (field.display) $scope.increaseFieldCounter(field);
-
- // we are now displaying the field, kill it's details
- if (field.details) {
- $scope.toggleDetails(field);
- }
- };
-
- $scope.toggleDetails = function (field, recompute) {
- if (_.isUndefined(field.details) || recompute) {
- // This is inherited from fieldChooser
- $scope.details(field, recompute);
- detailScope.$destroy();
- detailScope = $scope.$new();
- detailScope.warnings = getWarnings(field);
-
- detailsElem = $(detailsHtml);
- $compile(detailsElem)(detailScope);
- $elem.append(detailsElem);
- } else {
- delete field.details;
- detailsElem.remove();
- }
- };
-
- init();
- }
- };
- });
+ }
+
+
+ if (field.scripted) {
+ warnings.push('Scripted fields can take a long time to execute.');
+ }
+
+ if (warnings.length > 1) {
+ warnings = warnings.map(function (warning, i) {
+ return (i > 0 ? '\n' : '') + (i + 1) + ' - ' + warning;
+ });
+ }
+
+ return warnings;
+
+ };
+
+ $scope.toggleDisplay = function (field) {
+ // This is inherited from fieldChooser
+ $scope.toggle(field.name);
+ if (field.display) $scope.increaseFieldCounter(field);
+
+ if (field.details) {
+ $scope.toggleDetails(field);
+ }
+ };
+
+ $scope.toggleDetails = function (field, recompute) {
+ if (_.isUndefined(field.details) || recompute) {
+ // This is inherited from fieldChooser
+ $scope.details(field, recompute);
+ detailScope.$destroy();
+ detailScope = $scope.$new();
+ detailScope.warnings = getWarnings(field);
+
+ detailsElem = $(detailsHtml);
+ $compile(detailsElem)(detailScope);
+ $elem.append(detailsElem).addClass('active');
+ } else {
+ delete field.details;
+ detailsElem.remove();
+ $elem.removeClass('active');
+ }
+ };
+
+ init();
+ }
+ };
});
diff --git a/src/plugins/kibana/public/discover/components/field_chooser/field_chooser.html b/src/plugins/kibana/public/discover/components/field_chooser/field_chooser.html
index 4302d6a81e829..a336ec5621d5c 100644
--- a/src/plugins/kibana/public/discover/components/field_chooser/field_chooser.html
+++ b/src/plugins/kibana/public/discover/components/field_chooser/field_chooser.html
@@ -1,6 +1,7 @@