Commit

Merge remote-tracking branch 'origin/master' into snapshot-lifecycle-management
dakrone committed Apr 29, 2019
2 parents e13ebd1 + be39915 commit 47b5655
Showing 83 changed files with 1,215 additions and 498 deletions.
4 changes: 2 additions & 2 deletions Vagrantfile
@@ -244,7 +244,7 @@ def linux_common(config,
SHELL

config.vm.provision 'jdk-11', type: 'shell', inline: <<-SHELL
curl -sSL https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz | tar xz -C /opt/
curl -sSL https://download.oracle.com/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz | tar xz -C /opt/
SHELL

# This prevents leftovers from previous tests using the
@@ -405,7 +405,7 @@ def windows_common(config, name)

config.vm.provision 'windows-jdk-11', type: 'shell', inline: <<-SHELL
New-Item -ItemType Directory -Force -Path "C:/java"
Invoke-WebRequest "https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_windows-x64_bin.zip" -OutFile "C:/java/jdk-11.zip"
Invoke-WebRequest "https://download.oracle.com/java/GA/jdk11/9/GPL/openjdk-11.0.2_windows-x64_bin.zip" -OutFile "C:/java/jdk-11.zip"
Expand-Archive -Path "C:/java/jdk-11.zip" -DestinationPath "C:/java/"
SHELL

2 changes: 1 addition & 1 deletion buildSrc/version.properties
@@ -1,7 +1,7 @@
elasticsearch = 8.0.0
lucene = 8.1.0-snapshot-e460356abe

bundled_jdk = 12+33
bundled_jdk = 12.0.1+12@69cfe15208a647278a19ef0990eea691

# optional dependencies
spatial4j = 0.7
@@ -20,6 +20,7 @@
package org.elasticsearch.client.dataframe.transforms;

import org.elasticsearch.client.dataframe.transforms.pivot.PivotConfig;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
@@ -38,13 +39,15 @@ public class DataFrameTransformConfig implements ToXContentObject {
public static final ParseField ID = new ParseField("id");
public static final ParseField SOURCE = new ParseField("source");
public static final ParseField DEST = new ParseField("dest");
public static final ParseField DESCRIPTION = new ParseField("description");
// types of transforms
public static final ParseField PIVOT_TRANSFORM = new ParseField("pivot");

private final String id;
private final SourceConfig source;
private final DestConfig dest;
private final PivotConfig pivotConfig;
private final String description;

public static final ConstructingObjectParser<DataFrameTransformConfig, Void> PARSER =
new ConstructingObjectParser<>("data_frame_transform", true,
@@ -53,29 +56,47 @@ public class DataFrameTransformConfig implements ToXContentObject {
SourceConfig source = (SourceConfig) args[1];
DestConfig dest = (DestConfig) args[2];
PivotConfig pivotConfig = (PivotConfig) args[3];
return new DataFrameTransformConfig(id, source, dest, pivotConfig);
String description = (String)args[4];
return new DataFrameTransformConfig(id, source, dest, pivotConfig, description);
});

static {
PARSER.declareString(constructorArg(), ID);
PARSER.declareObject(constructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), SOURCE);
PARSER.declareObject(constructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DEST);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p), PIVOT_TRANSFORM);
PARSER.declareString(optionalConstructorArg(), DESCRIPTION);
}

public static DataFrameTransformConfig fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}

/**
* Helper method for previewing a data frame transform configuration
*
* The DataFrameTransformConfig returned from this method should only be used for previewing the resulting data.
*
* A new, valid, DataFrameTransformConfig with an appropriate destination and ID will have to be constructed to create
* the transform.
* @param source Source configuration for gathering the data
* @param pivotConfig Pivot config to preview
* @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
*/
public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
return new DataFrameTransformConfig(null, source, null, pivotConfig, null);
}

public DataFrameTransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final PivotConfig pivotConfig) {
final PivotConfig pivotConfig,
final String description) {
this.id = id;
this.source = source;
this.dest = dest;
this.pivotConfig = pivotConfig;
this.description = description;
}

public String getId() {
@@ -94,6 +115,11 @@ public PivotConfig getPivotConfig() {
return pivotConfig;
}

@Nullable
public String getDescription() {
return description;
}

@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
@@ -109,6 +135,9 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa
if (pivotConfig != null) {
builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig);
}
if (description != null) {
builder.field(DESCRIPTION.getPreferredName(), description);
}
builder.endObject();
return builder;
}
@@ -128,12 +157,13 @@ public boolean equals(Object other) {
return Objects.equals(this.id, that.id)
&& Objects.equals(this.source, that.source)
&& Objects.equals(this.dest, that.dest)
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.pivotConfig, that.pivotConfig);
}

@Override
public int hashCode() {
return Objects.hash(id, source, dest, pivotConfig);
return Objects.hash(id, source, dest, pivotConfig, description);
}

@Override
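Taken together, the new five-argument constructor and the `forPreview` factory give client code two distinct entry points. A minimal sketch of the updated usage, restricted to the constructors visible in this diff (the index names and description text are illustrative; `queryConfig` and `pivotConfig` are assumed to be built as in the tests that follow):

    import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig;
    import org.elasticsearch.client.dataframe.transforms.DestConfig;
    import org.elasticsearch.client.dataframe.transforms.QueryConfig;
    import org.elasticsearch.client.dataframe.transforms.SourceConfig;
    import org.elasticsearch.client.dataframe.transforms.pivot.PivotConfig;

    public class TransformConfigSketch {

        // Full config for creating a transform: the new fifth argument is the
        // optional description, serialized only when non-null.
        static DataFrameTransformConfig full(QueryConfig queryConfig, PivotConfig pivotConfig) {
            return new DataFrameTransformConfig(
                "reviewer-avg-rating",                                      // id
                new SourceConfig(new String[]{"source-data"}, queryConfig), // where to read
                new DestConfig("pivot-dest"),                               // where to write
                pivotConfig,                                                // pivot function
                "Average rating per reviewer");                             // may be null
        }

        // Preview-only config: id and destination are intentionally null,
        // so a preview request still validates.
        static DataFrameTransformConfig preview(QueryConfig queryConfig, PivotConfig pivotConfig) {
            return DataFrameTransformConfig.forPreview(
                new SourceConfig(new String[]{"source-data"}, queryConfig), pivotConfig);
        }
    }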
@@ -312,7 +312,8 @@ private DataFrameTransformConfig validDataFrameTransformConfig(String id, String
return new DataFrameTransformConfig(id,
new SourceConfig(new String[]{source}, queryConfig),
destConfig,
pivotConfig);
pivotConfig,
"this is a test transform");
}

public void testGetStats() throws Exception {
@@ -329,7 +330,10 @@ public void testGetStats() throws Exception {

String id = "test-get-stats";
DataFrameTransformConfig transform = new DataFrameTransformConfig(id,
new SourceConfig(new String[]{sourceIndex}, queryConfig), new DestConfig("pivot-dest"), pivotConfig);
new SourceConfig(new String[]{sourceIndex}, queryConfig),
new DestConfig("pivot-dest"),
pivotConfig,
"transform for testing stats");

DataFrameClient client = highLevelClient().dataFrame();
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
@@ -65,13 +65,12 @@ public void testValidate() {
containsString("preview requires a non-null data frame config"));

// null id and destination is valid
DataFrameTransformConfig config = new DataFrameTransformConfig(null, randomSourceConfig(), null,
PivotConfigTests.randomPivotConfig());
DataFrameTransformConfig config = DataFrameTransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig());

assertFalse(new PreviewDataFrameTransformRequest(config).validate().isPresent());

// null source is not valid
config = new DataFrameTransformConfig(null, null, null, PivotConfigTests.randomPivotConfig());
config = new DataFrameTransformConfig(null, null, null, PivotConfigTests.randomPivotConfig(), null);

Optional<ValidationException> error = new PreviewDataFrameTransformRequest(config).validate();
assertTrue(error.isPresent());
@@ -40,7 +40,7 @@ public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase<
public void testValidate() {
assertFalse(createTestInstance().validate().isPresent());

DataFrameTransformConfig config = new DataFrameTransformConfig(null, null, null, PivotConfigTests.randomPivotConfig());
DataFrameTransformConfig config = new DataFrameTransformConfig(null, null, null, PivotConfigTests.randomPivotConfig(), null);

Optional<ValidationException> error = new PutDataFrameTransformRequest(config).validate();
assertTrue(error.isPresent());
@@ -37,7 +37,7 @@ public class DataFrameTransformConfigTests extends AbstractXContentTestCase<Data

public static DataFrameTransformConfig randomDataFrameTransformConfig() {
return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomSourceConfig(),
randomDestConfig(), PivotConfigTests.randomPivotConfig());
randomDestConfig(), PivotConfigTests.randomPivotConfig(), randomBoolean() ? null : randomAlphaOfLengthBetween(1, 100));
}

@Override
@@ -141,7 +141,8 @@ public void testPutDataFrameTransform() throws IOException, InterruptedException
new DataFrameTransformConfig("reviewer-avg-rating", // <1>
sourceConfig, // <2>
new DestConfig("pivot-destination"), // <3>
pivotConfig); // <4>
pivotConfig, // <4>
"This is my test transform"); // <5>
// end::put-data-frame-transform-config

{
@@ -161,7 +162,7 @@ public void testPutDataFrameTransform() throws IOException, InterruptedException
{
DataFrameTransformConfig configWithDifferentId = new DataFrameTransformConfig("reviewer-avg-rating2",
transformConfig.getSource(), transformConfig.getDestination(),
transformConfig.getPivotConfig());
transformConfig.getPivotConfig(), null);
PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(configWithDifferentId);

// tag::put-data-frame-transform-execute-listener
@@ -205,7 +206,7 @@ public void testStartStop() throws IOException, InterruptedException {
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);

DataFrameTransformConfig transformConfig = new DataFrameTransformConfig("mega-transform",
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest"), pivotConfig);
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest"), pivotConfig, null);

client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
transformsToClean.add(transformConfig.getId());
@@ -320,9 +321,9 @@ public void testDeleteDataFrameTransform() throws IOException, InterruptedExcept
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);

DataFrameTransformConfig transformConfig1 = new DataFrameTransformConfig("mega-transform",
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest"), pivotConfig);
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest"), pivotConfig, null);
DataFrameTransformConfig transformConfig2 = new DataFrameTransformConfig("mega-transform2",
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest2"), pivotConfig);
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest2"), pivotConfig, null);

client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig1), RequestOptions.DEFAULT);
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig2), RequestOptions.DEFAULT);
@@ -386,11 +387,9 @@ public void testPreview() throws IOException, InterruptedException {

// tag::preview-data-frame-transform-request
DataFrameTransformConfig transformConfig =
new DataFrameTransformConfig(null, // <1>
new SourceConfig(new String[]{"source-data"}, queryConfig),
null, // <2>
pivotConfig);

DataFrameTransformConfig.forPreview(
new SourceConfig(new String[]{"source-data"}, queryConfig), // <1>
pivotConfig); // <2>
PreviewDataFrameTransformRequest request =
new PreviewDataFrameTransformRequest(transformConfig); // <3>
// end::preview-data-frame-transform-request
@@ -447,7 +446,7 @@ public void testGetStats() throws IOException, InterruptedException {

String id = "statisitcal-transform";
DataFrameTransformConfig transformConfig = new DataFrameTransformConfig(id,
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("dest"), pivotConfig);
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("dest"), pivotConfig, null);
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);

// tag::get-data-frame-transform-stats-request
@@ -526,7 +525,7 @@ public void testGetDataFrameTransform() throws IOException, InterruptedException

DataFrameTransformConfig putTransformConfig = new DataFrameTransformConfig("mega-transform",
new SourceConfig(new String[]{"source-data"}, queryConfig),
new DestConfig("pivot-dest"), pivotConfig);
new DestConfig("pivot-dest"), pivotConfig, null);

RestHighLevelClient client = highLevelClient();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(putTransformConfig), RequestOptions.DEFAULT);
13 changes: 11 additions & 2 deletions distribution/build.gradle
@@ -224,22 +224,31 @@ xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
*****************************************************************************/
// extract the bundled jdk version, broken into elements as: [feature, interim, update, build]
// Note the "patch" version is not yet handled here, as it has not yet been used by java.
Pattern JDK_VERSION = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)")
Pattern JDK_VERSION = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)@([a-f0-9]{32})?")
Matcher jdkVersionMatcher = JDK_VERSION.matcher(VersionProperties.bundledJdk)
if (jdkVersionMatcher.matches() == false) {
throw new IllegalArgumentException("Malformed jdk version [" + VersionProperties.bundledJdk + "]")
}
String jdkVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : "")
String jdkMajor = jdkVersionMatcher.group(1)
String jdkBuild = jdkVersionMatcher.group(3)
String hash = jdkVersionMatcher.group(4)

repositories {
// simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back
ivy {
url "https://download.java.net"
url "https://download.oracle.com"
patternLayout {
artifact "java/GA/jdk${jdkMajor}/${jdkBuild}/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"
}
}
// current pattern since 12.0.1
ivy {
url "https://download.oracle.com"
patternLayout {
artifact "java/GA/jdk${jdkVersion}/${hash}/${jdkBuild}/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"
}
}
}
for (String platform : ['linux', 'darwin', 'windows']) {
String jdkConfigName = "jdk_${platform}"
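For reference, a standalone Java sketch (not part of the build) of what the tightened `JDK_VERSION` pattern extracts from the new `bundled_jdk` value in buildSrc/version.properties. Note that the `@` separator is now mandatory, so the previous `12+33` form would be rejected as malformed:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class JdkVersionSketch {
        public static void main(String[] args) {
            // Same pattern as in distribution/build.gradle above:
            // feature(.interim.update)? + build @ optional 32-char hash
            Pattern jdkVersionPattern = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)@([a-f0-9]{32})?");
            Matcher m = jdkVersionPattern.matcher("12.0.1+12@69cfe15208a647278a19ef0990eea691");
            if (m.matches() == false) {
                throw new IllegalArgumentException("Malformed jdk version");
            }
            String jdkMajor = m.group(1);                                          // "12"
            String jdkVersion = jdkMajor + (m.group(2) != null ? m.group(2) : ""); // "12.0.1"
            String jdkBuild = m.group(3);                                          // "12"
            String hash = m.group(4);                                              // 32-char hex hash
            // Substituted into the new ivy layout, these values yield:
            // java/GA/jdk12.0.1/69cfe15208a647278a19ef0990eea691/12/GPL/openjdk-[revision]_[module]-x64_bin.[ext]
            System.out.println(jdkVersion + "+" + jdkBuild + " (" + hash + ")");
        }
    }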
3 changes: 2 additions & 1 deletion distribution/src/bin/elasticsearch-plugin
@@ -1,6 +1,7 @@
#!/bin/bash

ES_MAIN_CLASS=org.elasticsearch.plugins.PluginCli \
ES_JAVA_OPTS="--add-opens java.base/sun.security.provider=ALL-UNNAMED $ES_JAVA_OPTS" \
ES_MAIN_CLASS=org.elasticsearch.plugins.PluginCli \
ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/plugin-cli \
"`dirname "$0"`"/elasticsearch-cli \
"$@"
1 change: 1 addition & 0 deletions distribution/src/bin/elasticsearch-plugin.bat
@@ -3,6 +3,7 @@
setlocal enabledelayedexpansion
setlocal enableextensions

set ES_JAVA_OPTS=--add-opens java.base/sun.security.provider=ALL-UNNAMED %ES_JAVA_OPTS%
set ES_MAIN_CLASS=org.elasticsearch.plugins.PluginCli
set ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/plugin-cli
call "%~dp0elasticsearch-cli.bat" ^
@@ -20,8 +20,8 @@ A +{request}+ takes a single argument: a valid data frame transform config.
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> The transform Id may be null for the preview
<2> The destination may be null for the preview
<1> The source config from which the data should be gathered
<2> The pivot config used to transform the data
<3> The configuration of the {dataframe-job} to preview

include::../execution.asciidoc[]
@@ -35,6 +35,7 @@ include-tagged::{doc-tests-file}[{api}-config]
<2> The source indices and query from which to gather data
<3> The destination index
<4> The PivotConfig
<5> Optional free text description of the transform

[id="{upid}-{api}-query-config"]

5 changes: 4 additions & 1 deletion docs/reference/data-frames/apis/put-transform.asciidoc
@@ -33,6 +33,8 @@ a `query`.
`pivot`:: Defines the pivot function `group by` fields and the aggregation to
reduce the data.

`description`:: Optional free text description of the data frame transform


//==== Authorization

@@ -73,7 +75,8 @@ PUT _data_frame/transforms/ecommerce_transform
}
}
}
}
},
"description": "Maximum priced ecommerce data by customer_id in Asia"
}
--------------------------------------------------
// CONSOLE
5 changes: 5 additions & 0 deletions docs/reference/docs/bulk.asciidoc
@@ -283,3 +283,8 @@ POST _bulk
=== Security

See <<url-access-control>>.

[float]
[[bulk-partial-responses]]
=== Partial responses
To ensure fast responses, the bulk API will respond with partial results if one or more shards fail. See <<shard-failures, Shard failures>> for more information.
22 changes: 17 additions & 5 deletions docs/reference/docs/data-replication.asciidoc
@@ -102,12 +102,24 @@ is as follows:
. Combine the results and respond. Note that in the case of get by ID look up, only one shard is relevant and this step can be skipped.

[float]
==== Failure handling
[[shard-failures]]
==== Shard failures

When a shard fails to respond to a read request, the coordinating node sends the
request to another shard copy in the same replication group. Repeated failures
can result in no available shard copies.

To ensure fast responses, the following APIs will
respond with partial results if one or more shards fail:

* <<search-search, Search>>
* <<search-multi-search, Multi Search>>
* <<docs-bulk, Bulk>>
* <<docs-multi-get, Multi Get>>

When a shard fails to respond to a read request, the coordinating node will select another copy from the same replication group
and send the shard level search request to that copy instead. Repetitive failures can result in no shard copies being available.
In some cases, such as `_search`, Elasticsearch will prefer to respond fast, albeit with partial results, instead of waiting
for the issue to be resolved (partial results are indicated in the `_shards` header of the response).
Responses containing partial results still provide a `200 OK` HTTP status code.
Shard failures are indicated by the `timed_out` and `_shards` fields of
the response header.

[float]
=== A few simple implications
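Because a partial result still carries a `200 OK` status, callers must inspect the response body rather than the HTTP status. A minimal sketch using the high-level REST client (the index name and error handling are illustrative, not part of this commit):

    import java.io.IOException;

    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.action.search.ShardSearchFailure;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;

    public class PartialResultsSketch {
        static void checkForPartialResults(RestHighLevelClient client) throws IOException {
            SearchResponse response = client.search(new SearchRequest("my-index"), RequestOptions.DEFAULT);
            // Partial results arrive with HTTP 200; the body carries the signal,
            // mirroring the timed_out and _shards fields described above.
            if (response.isTimedOut() || response.getFailedShards() > 0) {
                for (ShardSearchFailure failure : response.getShardFailures()) {
                    System.err.println("shard failure: " + failure.reason());
                }
            }
        }
    }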