Merge remote-tracking branch 'es/master' into ccr
* es/master:
  Add remote cluster client (#29495)
  Ensure flush happens on shard idle
  Adds SpanGapQueryBuilder in the query DSL (#28636)
  Control max size and count of warning headers (#28427)
  Make index APIs work without types. (#29479)
  Deprecate filtering on `_type`. (#29468)
  Fix auto-generated ID example format (#29461)
  Fix typo in max number of threads check docs (#29469)
  Add primary term to translog header (#29227)
  Add a helper method to get a random java.util.TimeZone (#29487)
  Move TimeValue into elasticsearch-core project (#29486)
  Fix NPE in InternalGeoCentroidTests#testReduceRandom (#29481)
  Build: introduce keystoreFile for cluster config (#29491)
  test: Index more docs, so that it is less likely the search request does not time out.
martijnvg committed Apr 13, 2018
2 parents 0dd61fc + 694e2a9 commit 9da3e73
Showing 98 changed files with 1,722 additions and 678 deletions.
@@ -141,6 +141,8 @@ class ClusterConfiguration {

Map<String, String> keystoreSettings = new HashMap<>()

+Map<String, Object> keystoreFiles = new HashMap<>()
+
// map from destination path, to source file
Map<String, Object> extraConfigFiles = new HashMap<>()

@@ -167,6 +169,15 @@ class ClusterConfiguration {
keystoreSettings.put(name, value)
}

+/**
+ * Adds a file to the keystore. The name is the secure setting name, and the sourceFile
+ * is anything accepted by project.file()
+ */
+@Input
+void keystoreFile(String name, Object sourceFile) {
+    keystoreFiles.put(name, sourceFile)
+}
+
@Input
void plugin(String path) {
Project pluginProject = project.project(path)
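A minimal sketch of how the new `keystoreFile` DSL might be used from a build script. The setting name and source path below are invented for illustration, and `keystoreSetting` is assumed to be the pre-existing string-valued counterpart whose body appears above:

[source,groovy]
--------------------------------------------------
// Hypothetical integTestCluster block exercising the new API; names are placeholders.
integTestCluster {
    keystoreSetting 'some.secure.setting', 'a-secret-value'                  // existing: literal string value
    keystoreFile 'some.secure.file.setting', 'src/test/resources/secret.bin' // new: contents of a file
}
--------------------------------------------------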
@@ -180,6 +180,7 @@ class ClusterFormationTasks {
setup = configureWriteConfigTask(taskName(prefix, node, 'configure'), project, setup, node, seedNode)
setup = configureCreateKeystoreTask(taskName(prefix, node, 'createKeystore'), project, setup, node)
setup = configureAddKeystoreSettingTasks(prefix, project, setup, node)
+setup = configureAddKeystoreFileTasks(prefix, project, setup, node)

if (node.config.plugins.isEmpty() == false) {
if (node.nodeVersion == VersionProperties.elasticsearch) {
@@ -323,7 +324,7 @@ class ClusterFormationTasks {

/** Adds a task to create keystore */
static Task configureCreateKeystoreTask(String name, Project project, Task setup, NodeInfo node) {
-if (node.config.keystoreSettings.isEmpty()) {
+if (node.config.keystoreSettings.isEmpty() && node.config.keystoreFiles.isEmpty()) {
return setup
} else {
/*
@@ -357,6 +358,37 @@ class ClusterFormationTasks {
return parentTask
}

+/** Adds tasks to add files to the keystore */
+static Task configureAddKeystoreFileTasks(String parent, Project project, Task setup, NodeInfo node) {
+    Map<String, Object> kvs = node.config.keystoreFiles
+    if (kvs.isEmpty()) {
+        return setup
+    }
+    Task parentTask = setup
+    /*
+     * We have to delay building the string as the path will not exist during configuration which will fail on Windows due to getting
+     * the short name requiring the path to already exist.
+     */
+    final Object esKeystoreUtil = "${-> node.binPath().resolve('elasticsearch-keystore').toString()}"
+    for (Map.Entry<String, Object> entry in kvs) {
+        String key = entry.getKey()
+        String name = taskName(parent, node, 'addToKeystore#' + key)
+        String srcFileName = entry.getValue()
+        Task t = configureExecTask(name, project, parentTask, node, esKeystoreUtil, 'add-file', key, srcFileName)
+        t.doFirst {
+            File srcFile = project.file(srcFileName)
+            if (srcFile.isDirectory()) {
+                throw new GradleException("Source for keystoreFile must be a file: ${srcFile}")
+            }
+            if (srcFile.exists() == false) {
+                throw new GradleException("Source file for keystoreFile does not exist: ${srcFile}")
+            }
+        }
+        parentTask = t
+    }
+    return parentTask
+}
+
static Task configureExtraConfigFilesTask(String name, Project project, Task setup, NodeInfo node) {
if (node.config.extraConfigFiles.isEmpty()) {
return setup
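For reference, each task generated above ends up invoking the keystore CLI as `elasticsearch-keystore add-file <setting> <path>`, roughly like this (setting name and path are placeholders; the `add-file` subcommand comes from the task arguments above):

[source,sh]
--------------------------------------------------
# Placeholder setting name and source path.
bin/elasticsearch-keystore add-file some.secure.file.setting /path/to/secret.bin
--------------------------------------------------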
12 changes: 6 additions & 6 deletions docs/painless/painless-getting-started.asciidoc
@@ -239,7 +239,7 @@ their last name:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -260,7 +260,7 @@ names start with a consonant and end with a vowel:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -281,7 +281,7 @@ remove all of the vowels in all of their last names:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -297,7 +297,7 @@ method so it supports `$1` and `\1` for replacements:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -319,7 +319,7 @@ This will make all of the vowels in the hockey player's last names upper case:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -337,7 +337,7 @@ last names upper case:

[source,js]
----------------------------------------------------------------
-POST hockey/player/_update_by_query
+POST hockey/_update_by_query
{
"script": {
"lang": "painless",
@@ -38,7 +38,7 @@ Example:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query" : {
"match" : {"content" : "Bird flu"}
@@ -153,7 +153,7 @@ We can drill down into examples of these documents to see why pozmantier is conn

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query": {
"simple_query_string": {
@@ -221,7 +221,7 @@ with the `filter_duplicate_text` setting turned on:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query": {
"match": {
@@ -424,7 +424,7 @@ context:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query" : {
"match" : {
@@ -463,7 +463,7 @@ will be analyzed using the `source_fields` parameter:

[source,js]
--------------------------------------------------
-GET news/article/_search
+GET news/_search
{
"query" : {
"match" : {
@@ -217,7 +217,7 @@ had a value.

[source,js]
--------------------------------------------------
-GET latency/data/_search
+GET latency/_search
{
"size": 0,
"aggs" : {
8 changes: 4 additions & 4 deletions docs/reference/docs/delete-by-query.asciidoc
@@ -75,7 +75,7 @@ Back to the API format, this will delete tweets from the `twitter` index:

[source,js]
--------------------------------------------------
-POST twitter/_doc/_delete_by_query?conflicts=proceed
+POST twitter/_delete_by_query?conflicts=proceed
{
"query": {
"match_all": {}
@@ -85,12 +85,12 @@ POST twitter/_doc/_delete_by_query?conflicts=proceed
// CONSOLE
// TEST[setup:twitter]

-It's also possible to delete documents of multiple indexes and multiple
-types at once, just like the search API:
+It's also possible to delete documents of multiple indexes at once, just like
+the search API:

[source,js]
--------------------------------------------------
-POST twitter,blog/_docs,post/_delete_by_query
+POST twitter,blog/_delete_by_query
{
"query": {
"match_all": {}
4 changes: 2 additions & 2 deletions docs/reference/docs/index_.asciidoc
@@ -229,14 +229,14 @@ The result of the above index operation is:
},
"_index" : "twitter",
"_type" : "_doc",
"_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32",
"_id" : "W0tpsmIBdwcYyG50zbta",
"_version" : 1,
"_seq_no" : 0,
"_primary_term" : 1,
"result": "created"
}
--------------------------------------------------
-// TESTRESPONSE[s/6a8ca01c-7896-48e9-81cc-9f70661fcb32/$body._id/ s/"successful" : 2/"successful" : 1/]
+// TESTRESPONSE[s/W0tpsmIBdwcYyG50zbta/$body._id/ s/"successful" : 2/"successful" : 1/]

[float]
[[index-routing]]
8 changes: 4 additions & 4 deletions docs/reference/docs/update-by-query.asciidoc
@@ -67,7 +67,7 @@ Back to the API format, this will update tweets from the `twitter` index:

[source,js]
--------------------------------------------------
-POST twitter/_doc/_update_by_query?conflicts=proceed
+POST twitter/_update_by_query?conflicts=proceed
--------------------------------------------------
// CONSOLE
// TEST[setup:twitter]
@@ -145,12 +145,12 @@ This API doesn't allow you to move the documents it touches, just modify their
source. This is intentional! We've made no provisions for removing the document
from its original location.

-It's also possible to do this whole thing on multiple indexes and multiple
-types at once, just like the search API:
+It's also possible to do this whole thing on multiple indexes at once, just
+like the search API:

[source,js]
--------------------------------------------------
-POST twitter,blog/_doc,post/_update_by_query
+POST twitter,blog/_update_by_query
--------------------------------------------------
// CONSOLE
// TEST[s/^/PUT twitter\nPUT blog\n/]
2 changes: 1 addition & 1 deletion docs/reference/modules/cluster/misc.asciidoc
@@ -82,4 +82,4 @@ Enable or disable allocation for persistent tasks:
This setting does not affect the persistent tasks that are already being executed.
Only newly created persistent tasks, or tasks that must be reassigned (after a node
left the cluster, for example), are impacted by this setting.
---
+--
8 changes: 7 additions & 1 deletion docs/reference/modules/http.asciidoc
@@ -20,7 +20,7 @@ http://en.wikipedia.org/wiki/Chunked_transfer_encoding[HTTP chunking].

The settings in the table below can be configured for HTTP. Note that none of
them are dynamically updatable so for them to take effect they should be set in
-`elasticsearch.yml`.
+the Elasticsearch <<settings, configuration file>>.

[cols="<,<",options="header",]
|=======================================================================
@@ -100,6 +100,12 @@ simple message will be returned. Defaults to `true`

|`http.pipelining.max_events` |The maximum number of events to be queued up in memory before a HTTP connection is closed, defaults to `10000`.

+|`http.max_warning_header_count` |The maximum number of warning headers in
+client HTTP responses, defaults to unbounded.
+
+|`http.max_warning_header_size` |The maximum total size of warning headers in
+client HTTP responses, defaults to unbounded.
+
|=======================================================================
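A sketch of how the two new limits might be bounded in `elasticsearch.yml`; the values here are arbitrary examples, not defaults (the rows above state both settings default to unbounded):

[source,yaml]
--------------------------------------------------
# elasticsearch.yml — illustrative values only; both settings default to unbounded.
http.max_warning_header_count: 64
http.max_warning_header_size: 7kb
--------------------------------------------------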

It also uses the common
12 changes: 1 addition & 11 deletions docs/reference/search/search.asciidoc
@@ -12,8 +12,7 @@ that match the query. The query can either be provided using a simple
All search APIs can be applied across multiple types within an index, and
across multiple indices with support for the
<<multi-index,multi index syntax>>. For
-example, we can search on all documents across all types within the
-twitter index:
+example, we can search on all documents within the twitter index:

[source,js]
--------------------------------------------------
@@ -22,15 +21,6 @@ GET /twitter/_search?q=user:kimchy
// CONSOLE
// TEST[setup:twitter]

-We can also search within specific types:
-
-[source,js]
---------------------------------------------------
-GET /twitter/tweet,user/_search?q=user:kimchy
---------------------------------------------------
-// CONSOLE
-// TEST[setup:twitter]
-
We can also search all tweets with a certain tag across several indices
(for example, when each user has his own index):

2 changes: 1 addition & 1 deletion docs/reference/setup/bootstrap-checks.asciidoc
@@ -114,7 +114,7 @@ that the Elasticsearch process has the rights to create enough threads
under normal use. This check is enforced only on Linux. If you are on
Linux, to pass the maximum number of threads check, you must configure
your system to allow the Elasticsearch process the ability to create at
-least 2048 threads. This can be done via `/etc/security/limits.conf`
+least 4096 threads. This can be done via `/etc/security/limits.conf`
using the `nproc` setting (note that you might have to increase the
limits for the `root` user too).
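For example, a `/etc/security/limits.conf` entry along the following lines would satisfy the updated check, assuming Elasticsearch runs as the user `elasticsearch`:

[source,sh]
--------------------------------------------------
# /etc/security/limits.conf — the user name is an assumption; '-' sets both soft and hard limits.
elasticsearch  -  nproc  4096
--------------------------------------------------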

@@ -19,15 +19,12 @@

package org.elasticsearch.common.unit;

-import org.elasticsearch.common.xcontent.ToXContentFragment;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

-public class TimeValue implements Comparable<TimeValue>, ToXContentFragment {
+public class TimeValue implements Comparable<TimeValue> {

/** How many nano-seconds in one milli-second */
public static final long NSEC_PER_MSEC = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS);
@@ -352,9 +349,4 @@ public int compareTo(TimeValue timeValue) {
double otherValue = ((double) timeValue.duration) * timeValue.timeUnit.toNanos(1);
return Double.compare(thisValue, otherValue);
}

-@Override
-public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-    return builder.value(toString());
-}
}
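Since `TimeValue` no longer implements `ToXContentFragment`, callers that relied on the removed `toXContent` (which simply emitted `toString()`) now have to render the value themselves. A minimal sketch, with a hypothetical helper class and field name:

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;

// Sketch only: reproduces what the removed toXContent emitted, i.e. builder.value(toString()).
final class TimeValueXContentHelper {  // hypothetical helper, not part of this commit
    static XContentBuilder writeTimeout(XContentBuilder builder, TimeValue timeout) throws IOException {
        return builder.field("timeout", timeout.toString());
    }
}
--------------------------------------------------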
@@ -19,15 +19,10 @@

package org.elasticsearch.common.unit;

-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;

-import java.io.IOException;
import java.util.concurrent.TimeUnit;

-import static org.elasticsearch.common.unit.TimeValue.timeValueNanos;
-import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.containsString;
@@ -154,31 +149,6 @@ private String randomTimeUnit() {
return randomFrom("nanos", "micros", "ms", "s", "m", "h", "d");
}

-private void assertEqualityAfterSerialize(TimeValue value, int expectedSize) throws IOException {
-    BytesStreamOutput out = new BytesStreamOutput();
-    out.writeTimeValue(value);
-    assertEquals(expectedSize, out.size());
-
-    StreamInput in = out.bytes().streamInput();
-    TimeValue inValue = in.readTimeValue();
-
-    assertThat(inValue, equalTo(value));
-    assertThat(inValue.duration(), equalTo(value.duration()));
-    assertThat(inValue.timeUnit(), equalTo(value.timeUnit()));
-}
-
-public void testSerialize() throws Exception {
-    assertEqualityAfterSerialize(new TimeValue(100, TimeUnit.DAYS), 3);
-    assertEqualityAfterSerialize(timeValueNanos(-1), 2);
-    assertEqualityAfterSerialize(timeValueNanos(1), 2);
-    assertEqualityAfterSerialize(timeValueSeconds(30), 2);
-
-    final TimeValue timeValue = new TimeValue(randomIntBetween(0, 1024), randomFrom(TimeUnit.values()));
-    BytesStreamOutput out = new BytesStreamOutput();
-    out.writeZLong(timeValue.duration());
-    assertEqualityAfterSerialize(timeValue, 1 + out.bytes().length());
-}
-
public void testFailOnUnknownUnits() {
try {
TimeValue.parseTimeValue("23tw", null, "test");