diff --git a/TESTING.asciidoc b/TESTING.asciidoc
index 3b11e2ce3106e..3229b7e768137 100644
--- a/TESTING.asciidoc
+++ b/TESTING.asciidoc
@@ -340,13 +340,13 @@ be downloaded again unless they have been updated to a new version.
+
. Run the tests with `./gradlew packagingTest`. This will cause Gradle to build
the tar, zip, and deb packages and all the plugins. It will then run the tests
-on ubuntu-1404 and centos-7. We chose those two distributions as the default
+on ubuntu-1604 and centos-7. We chose those two distributions as the default
because they cover deb and rpm packaging and SysVinit and systemd.

You can choose which boxes to test by setting the `-Pvagrant.boxes` project property. All of
the valid options for this property are:

-* `sample` - The default, only chooses ubuntu-1404 and centos-7
+* `sample` - The default, only chooses ubuntu-1604 and centos-7
* List of box names, comma separated (e.g. `oel-7,fedora-28`) - Chooses
exactly the boxes listed.
* `linux-all` - All linux boxes.
* `windows-all` - All Windows boxes. If there are any Windows boxes which do not
@@ -364,11 +364,10 @@ will remain running and you'll have to stop them manually with `./gradlew stop`
All the regular vagrant commands should just work so you can get a shell in a
VM running xenial by running
-`vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404`.
+`vagrant up ubuntu-1604 --provider virtualbox && vagrant ssh ubuntu-1604`.

These are the linux flavors supported, all of which we provide images for

-* ubuntu-1404 aka trusty
* ubuntu-1604 aka xenial
* ubuntu-1804 aka bionic beaver
* debian-8 aka jessie
@@ -422,13 +421,13 @@ It's important to think of VMs like cattle. If they become lame you just shoot
them and let vagrant reprovision them. Say you've hosed your xenial VM:

----------------------------------------------------
-vagrant ssh ubuntu-1404 -c 'sudo rm -rf /bin'; echo oops
+vagrant ssh ubuntu-1604 -c 'sudo rm -rf /bin'; echo oops
----------------------------------------------------

All you've got to do to get another one is

----------------------------------------------
-vagrant destroy -f ubuntu-1404 && vagrant up ubuntu-1404 --provider virtualbox
+vagrant destroy -f ubuntu-1604 && vagrant up ubuntu-1604 --provider virtualbox
----------------------------------------------

The whole process takes a minute and a half on a modern laptop, two and a half
diff --git a/build.gradle b/build.gradle
index f6c3222a4074a..64d51f0938026 100644
--- a/build.gradle
+++ b/build.gradle
@@ -162,8 +162,8 @@ task verifyVersions {
 * after the backport of the backcompat code is complete.
diff --git a/Vagrantfile b/Vagrantfile
index 4624172b02ada..4fbf1beeb04e9 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -46,12 +46,6 @@ Vagrant.configure(2) do |config|
   PROJECT_DIR = ENV['VAGRANT_PROJECT_DIR'] || Dir.pwd
   config.vm.synced_folder PROJECT_DIR, '/project'

-  'ubuntu-1404'.tap do |box|
-    config.vm.define box, define_opts do |config|
-      config.vm.box = 'elastic/ubuntu-14.04-x86_64'
-      deb_common config, box
-    end
-  end
   'ubuntu-1604'.tap do |box|
     config.vm.define box, define_opts do |config|
       config.vm.box = 'elastic/ubuntu-16.04-x86_64'
diff --git a/build.gradle b/build.gradle
index f6c3222a4074a..64d51f0938026 100644
--- a/build.gradle
+++ b/build.gradle
@@ -162,8 +162,8 @@ task verifyVersions {
 * after the backport of the backcompat code is complete.
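 * For example, a change that must temporarily break backwards compatibility
 * sets bwc_tests_enabled = false and records its own PR link in
 * bwc_tests_disabled_issue; the backport PR then flips the flag back to
 * true and clears the link, which is exactly what the change below does.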
 */
-boolean bwc_tests_enabled = false
-final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/40319" /* place a PR link here when committing bwc changes */
+boolean bwc_tests_enabled = true
+final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
 if (bwc_tests_enabled == false) {
   if (bwc_tests_disabled_issue.isEmpty()) {
     throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index be54b2c68f639..9d25532d4cef1 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -245,6 +245,7 @@ if (project != rootProject) {
     forbiddenPatterns {
       exclude '**/*.wav'
+      exclude '**/*.p12'
       // the file that actually defines nocommit
       exclude '**/ForbiddenPatternsTask.java'
     }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 97074965a76f8..8a0e7a05327b4 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -361,16 +361,8 @@ class BuildPlugin implements Plugin<Project> {
             compilerJavaHome = findJavaHome(compilerJavaProperty)
         }
         if (compilerJavaHome == null) {
-            if (System.getProperty("idea.active") != null || System.getProperty("eclipse.launcher") != null) {
-                // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with
-                return Jvm.current().javaHome
-            } else {
-                throw new GradleException(
-                        "JAVA_HOME must be set to build Elasticsearch. " +
-                                "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
-                                "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details."
-                )
-            }
+            // If JAVA_HOME is not set, use the JDK that Gradle was run with.
+            return Jvm.current().javaHome
         }
         return compilerJavaHome
     }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
index 4bdef1ff6fd30..763b5509772af 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
@@ -31,7 +31,6 @@ class VagrantTestPlugin implements Plugin<Project> {
             'oel-7',
             'opensuse-42',
             'sles-12',
-            'ubuntu-1404',
             'ubuntu-1604',
             'ubuntu-1804'
     ])
@@ -48,7 +47,7 @@ class VagrantTestPlugin implements Plugin<Project> {
     /** Boxes used when sampling the tests **/
     static final List<String> SAMPLE = unmodifiableList([
             'centos-7',
-            'ubuntu-1404'
+            'ubuntu-1604'
     ])

     /** All distributions to bring into test VM, whether or not they are used **/
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java b/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java
new file mode 100644
index 0000000000000..a8680ef13dda0
--- /dev/null
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java
@@ -0,0 +1,233 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle.http;
+
+import org.gradle.api.logging.Logger;
+import org.gradle.api.logging.Logging;
+
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManagerFactory;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.security.KeyStoreException;
+import java.security.SecureRandom;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateFactory;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A utility to wait for a specific HTTP resource to be available, optionally with customized TLS trusted CAs.
+ * This is logically similar to using the Ant Get task to retrieve a resource, but with the difference that it can
+ * access resources that do not use the JRE's default trusted CAs.
+ */
+public class WaitForHttpResource {
+
+    private static final Logger logger = Logging.getLogger(WaitForHttpResource.class);
+
+    private Set<Integer> validResponseCodes = Collections.singleton(200);
+    private URL url;
+    private Set<File> certificateAuthorities;
+    private File trustStoreFile;
+    private String trustStorePassword;
+    private String username;
+    private String password;
+
+    public WaitForHttpResource(String protocol, String host, int numberOfNodes) throws MalformedURLException {
+        this(new URL(protocol + "://" + host + "/_cluster/health?wait_for_nodes=>=" + numberOfNodes + "&wait_for_status=yellow"));
+    }
+
+    public WaitForHttpResource(URL url) {
+        this.url = url;
+    }
+
+    public void setValidResponseCodes(int... validResponseCodes) {
+        this.validResponseCodes = new HashSet<>(validResponseCodes.length);
+        for (int rc : validResponseCodes) {
+            this.validResponseCodes.add(rc);
+        }
+    }
+
+    public void setCertificateAuthorities(File...
certificateAuthorities) { + this.certificateAuthorities = new HashSet<>(Arrays.asList(certificateAuthorities)); + } + + public void setTrustStoreFile(File trustStoreFile) { + this.trustStoreFile = trustStoreFile; + } + + public void setTrustStorePassword(String trustStorePassword) { + this.trustStorePassword = trustStorePassword; + } + + public void setUsername(String username) { + this.username = username; + } + + public void setPassword(String password) { + this.password = password; + } + + public boolean wait(int durationInMs) throws GeneralSecurityException, InterruptedException, IOException { + final long waitUntil = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(durationInMs); + final long sleep = Long.max(durationInMs / 10, 100); + + final SSLContext ssl; + final KeyStore trustStore = buildTrustStore(); + if (trustStore != null) { + ssl = createSslContext(trustStore); + } else { + ssl = null; + } + IOException failure = null; + for (; ; ) { + try { + checkResource(ssl); + return true; + } catch (IOException e) { + logger.debug("Failed to access resource [{}]", url, e); + failure = e; + } + if (System.nanoTime() < waitUntil) { + Thread.sleep(sleep); + } else { + logger.error("Failed to access url [{}]", url, failure); + return false; + } + } + } + + protected void checkResource(SSLContext ssl) throws IOException { + try { + final HttpURLConnection connection = buildConnection(ssl); + connection.connect(); + final Integer response = connection.getResponseCode(); + if (validResponseCodes.contains(response)) { + logger.info("Got successful response [{}] from URL [{}]", response, url); + return; + } else { + throw new IOException(response + " " + connection.getResponseMessage()); + } + } catch (IOException e) { + throw e; + } + } + + HttpURLConnection buildConnection(SSLContext ssl) throws IOException { + final HttpURLConnection connection = (HttpURLConnection) this.url.openConnection(); + configureSslContext(connection, ssl); + configureBasicAuth(connection); + connection.setRequestMethod("GET"); + return connection; + } + + private void configureSslContext(HttpURLConnection connection, SSLContext ssl) { + if (ssl != null) { + if (connection instanceof HttpsURLConnection) { + ((HttpsURLConnection) connection).setSSLSocketFactory(ssl.getSocketFactory()); + } else { + throw new IllegalStateException("SSL trust has been configured, but [" + url + "] is not a 'https' URL"); + } + } + } + + private void configureBasicAuth(HttpURLConnection connection) { + if (username != null) { + if (password == null) { + throw new IllegalStateException("Basic Auth user [" + username + + "] has been set, but no password has been configured"); + } + connection.setRequestProperty("Authorization", + "Basic " + Base64.getEncoder().encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8))); + } + } + + KeyStore buildTrustStore() throws GeneralSecurityException, IOException { + if (this.certificateAuthorities != null) { + if (trustStoreFile != null) { + throw new IllegalStateException("Cannot specify both truststore and CAs"); + } + return buildTrustStoreFromCA(); + } else if (trustStoreFile != null) { + return buildTrustStoreFromFile(); + } else { + return null; + } + } + + private KeyStore buildTrustStoreFromFile() throws GeneralSecurityException, IOException { + KeyStore keyStore = KeyStore.getInstance(trustStoreFile.getName().endsWith(".jks") ? "JKS" : "PKCS12"); + try (InputStream input = new FileInputStream(trustStoreFile)) { + keyStore.load(input, trustStorePassword == null ? 
null : trustStorePassword.toCharArray());
+        }
+        return keyStore;
+    }
+
+    private KeyStore buildTrustStoreFromCA() throws GeneralSecurityException, IOException {
+        final KeyStore store = KeyStore.getInstance(KeyStore.getDefaultType());
+        store.load(null, null);
+        final CertificateFactory certFactory = CertificateFactory.getInstance("X.509");
+        int counter = 0;
+        for (File ca : certificateAuthorities) {
+            try (InputStream input = new FileInputStream(ca)) {
+                for (Certificate certificate : certFactory.generateCertificates(input)) {
+                    store.setCertificateEntry("cert-" + counter, certificate);
+                    counter++;
+                }
+            }
+        }
+        return store;
+    }
+
+    private SSLContext createSslContext(KeyStore trustStore) throws GeneralSecurityException {
+        checkForTrustEntry(trustStore);
+        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+        tmf.init(trustStore);
+        SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
+        sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
+        return sslContext;
+    }
+
+    private void checkForTrustEntry(KeyStore trustStore) throws KeyStoreException {
+        Enumeration<String> enumeration = trustStore.aliases();
+        while (enumeration.hasMoreElements()) {
+            if (trustStore.isCertificateEntry(enumeration.nextElement())) {
+                // found trusted cert entry
+                return;
+            }
+        }
+        throw new IllegalStateException("Trust-store does not contain any trusted certificate entries");
+    }
+}
diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/http/WaitForHttpResourceTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/http/WaitForHttpResourceTests.java
new file mode 100644
index 0000000000000..67bae367c6f9f
--- /dev/null
+++ b/buildSrc/src/test/java/org/elasticsearch/gradle/http/WaitForHttpResourceTests.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.gradle.http; + +import org.elasticsearch.gradle.test.GradleUnitTestCase; + +import java.io.File; +import java.net.URL; +import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.X509Certificate; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.notNullValue; + +public class WaitForHttpResourceTests extends GradleUnitTestCase { + + public void testBuildTrustStoreFromFile() throws Exception { + final WaitForHttpResource http = new WaitForHttpResource(new URL("https://localhost/")); + final URL ca = getClass().getResource("/ca.p12"); + assertThat(ca, notNullValue()); + http.setTrustStoreFile(new File(ca.getPath())); + http.setTrustStorePassword("password"); + final KeyStore store = http.buildTrustStore(); + final Certificate certificate = store.getCertificate("ca"); + assertThat(certificate, notNullValue()); + assertThat(certificate, instanceOf(X509Certificate.class)); + assertThat(((X509Certificate)certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA")); + } + + public void testBuildTrustStoreFromCA() throws Exception { + final WaitForHttpResource http = new WaitForHttpResource(new URL("https://localhost/")); + final URL ca = getClass().getResource("/ca.pem"); + assertThat(ca, notNullValue()); + http.setCertificateAuthorities(new File(ca.getPath())); + final KeyStore store = http.buildTrustStore(); + final Certificate certificate = store.getCertificate("cert-0"); + assertThat(certificate, notNullValue()); + assertThat(certificate, instanceOf(X509Certificate.class)); + assertThat(((X509Certificate)certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA")); + } +} diff --git a/buildSrc/src/test/resources/ca.p12 b/buildSrc/src/test/resources/ca.p12 new file mode 100644 index 0000000000000..cc44494515b9f Binary files /dev/null and b/buildSrc/src/test/resources/ca.p12 differ diff --git a/buildSrc/src/test/resources/ca.pem b/buildSrc/src/test/resources/ca.pem new file mode 100644 index 0000000000000..8dda1767e4838 --- /dev/null +++ b/buildSrc/src/test/resources/ca.pem @@ -0,0 +1,25 @@ +Bag Attributes + friendlyName: ca + localKeyID: 54 69 6D 65 20 31 35 35 33 37 34 33 38 39 30 38 33 35 +subject=/CN=Elastic Certificate Tool Autogenerated CA +issuer=/CN=Elastic Certificate Tool Autogenerated CA +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIVAMQMmDRcXfXLaTp6ep1H8rC3tOrwMA0GCSqGSIb3DQEB +CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu +ZXJhdGVkIENBMB4XDTE5MDMyODAzMzEyNloXDTIyMDMyNzAzMzEyNlowNDEyMDAG +A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDT73N6JZeBPyzahc0aNcra +BpUROVGB9wXQqf8JeU4GtH+1qfqUKYKUJTe/DZWc+5Qz1WAKGZEvBySAlgbuncuq +VpLzWxpEui1vRW8JB3gjZgeY3vfErrEWWr95YM0e8rWu4AoAchzqsrG0/+po2eui +cN+8hI6jRKiBv/ZeQqja6KZ8y4Wt4VaNVL53+I7+eWA/aposu6/piUg2wZ/FNhVK +hypcJwDdp3fQaugtPj3y76303jTRgutgd3rtWFuy3MCDLfs3mSQUjO10s93zwLdC +XokyIywijS5CpO8mEuDRu9rb5J1DzwUpUfk+GMObb6rHjFKzSqnM3s+nasypQQ9L +AgMBAAGjUzBRMB0GA1UdDgQWBBQZEW88R95zSzO2tLseEWgI7ugvLzAfBgNVHSME +GDAWgBQZEW88R95zSzO2tLseEWgI7ugvLzAPBgNVHRMBAf8EBTADAQH/MA0GCSqG +SIb3DQEBCwUAA4IBAQBEJN0UbL77usVnzIvxKa3GpLBgJQAZtD1ifZppC4w46Bul +1G7Fdc+XMbzZlI4K6cWEdd5dfEssKA8btEtRzdNOqgggBpqrUU0mNlQ+vC22XORU +ykHAu2TsRwoHmuxkd9Et/QyuTFXR4fTiU8rsJuLFOgn+RdEblA0J0gJeIqdWI5Z1 
+z13OyZEl6BCQFyrntu2eERxaHEfsJOSBZE4RcecnLNGhIJBXE0Pk4iTiViJF/h7d
+kUUegKx0qewZif2eEZgrz12Vuen9a6bh2i2pNS95vABVVMr8uB+J1BGkNA5YT7J
+qtZA2tN//Evng7YDiR+KkB1kvXVZVIi2WPDLD/zu
+-----END CERTIFICATE-----
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformState.java
index fd191bb600ca6..248ee9a18f53f 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformState.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformState.java
@@ -43,14 +43,16 @@ public class DataFrameTransformState {
     private static final ParseField TASK_STATE = new ParseField("task_state");
     private static final ParseField CURRENT_POSITION = new ParseField("current_position");
     private static final ParseField GENERATION = new ParseField("generation");
+    private static final ParseField REASON = new ParseField("reason");

     @SuppressWarnings("unchecked")
     public static final ConstructingObjectParser<DataFrameTransformState, Void> PARSER =
-        new ConstructingObjectParser<>("data_frame_transform_state",
+        new ConstructingObjectParser<>("data_frame_transform_state", true,
             args -> new DataFrameTransformState((DataFrameTransformTaskState) args[0],
                 (IndexerState) args[1],
                 (HashMap<String, Object>) args[2],
-                (long) args[3]));
+                (long) args[3],
+                (String) args[4]));

     static {
         PARSER.declareField(constructorArg(),
@@ -68,6 +70,7 @@ public class DataFrameTransformState {
             throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
         }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY);
         PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), GENERATION);
+        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REASON);
     }

     public static DataFrameTransformState fromXContent(XContentParser parser) throws IOException {
@@ -78,15 +81,18 @@ public static DataFrameTransformState fromXContent(XContentParser parser) throws
     private final IndexerState indexerState;
     private final long generation;
     private final SortedMap<String, Object> currentPosition;
+    private final String reason;

     public DataFrameTransformState(DataFrameTransformTaskState taskState,
                                    IndexerState indexerState,
                                    @Nullable Map<String, Object> position,
-                                   long generation) {
+                                   long generation,
+                                   @Nullable String reason) {
         this.taskState = taskState;
         this.indexerState = indexerState;
         this.currentPosition = position == null ?
null : Collections.unmodifiableSortedMap(new TreeMap<>(position)); this.generation = generation; + this.reason = reason; } public IndexerState getIndexerState() { @@ -106,6 +112,11 @@ public long getGeneration() { return generation; } + @Nullable + public String getReason() { + return reason; + } + @Override public boolean equals(Object other) { if (this == other) { @@ -121,11 +132,13 @@ public boolean equals(Object other) { return Objects.equals(this.taskState, that.taskState) && Objects.equals(this.indexerState, that.indexerState) && Objects.equals(this.currentPosition, that.currentPosition) && - this.generation == that.generation; + this.generation == that.generation && + Objects.equals(this.reason, that.reason); } @Override public int hashCode() { - return Objects.hash(taskState, indexerState, currentPosition, generation); + return Objects.hash(taskState, indexerState, currentPosition, generation, reason); } + } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java index e8724cc071dae..3e564a86207ba 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java @@ -358,6 +358,7 @@ public void testGetStats() throws Exception { DataFrameTransformStateAndStats stateAndStats = response.getTransformsStateAndStats().get(0); assertEquals(IndexerState.STARTED, stateAndStats.getTransformState().getIndexerState()); assertEquals(DataFrameTransformTaskState.STARTED, stateAndStats.getTransformState().getTaskState()); + assertEquals(null, stateAndStats.getTransformState().getReason()); assertNotEquals(zeroIndexerStats, stateAndStats.getTransformStats()); }); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStateTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStateTests.java index 17dc388948167..fa1ac4202f9dd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStateTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/DataFrameTransformStateTests.java @@ -36,7 +36,8 @@ public void testFromXContent() throws IOException { DataFrameTransformStateTests::randomDataFrameTransformState, DataFrameTransformStateTests::toXContent, DataFrameTransformState::fromXContent) - .supportsUnknownFields(false) + .supportsUnknownFields(true) + .randomFieldsExcludeFilter(field -> field.equals("current_position")) .test(); } @@ -44,7 +45,8 @@ public static DataFrameTransformState randomDataFrameTransformState() { return new DataFrameTransformState(randomFrom(DataFrameTransformTaskState.values()), randomFrom(IndexerState.values()), randomPositionMap(), - randomLongBetween(0,10)); + randomLongBetween(0,10), + randomBoolean() ? 
null : randomAlphaOfLength(10)); } public static void toXContent(DataFrameTransformState state, XContentBuilder builder) throws IOException { @@ -55,6 +57,9 @@ public static void toXContent(DataFrameTransformState state, XContentBuilder bui builder.field("current_position", state.getPosition()); } builder.field("generation", state.getGeneration()); + if (state.getReason() != null) { + builder.field("reason", state.getReason()); + } builder.endObject(); } diff --git a/distribution/src/bin/elasticsearch-env.bat b/distribution/src/bin/elasticsearch-env.bat index bd34880e40ece..f1cdc2fd22457 100644 --- a/distribution/src/bin/elasticsearch-env.bat +++ b/distribution/src/bin/elasticsearch-env.bat @@ -65,5 +65,5 @@ rem check the Java version %JAVA% -cp "%ES_CLASSPATH%" "org.elasticsearch.tools.java_version_checker.JavaVersionChecker" || exit /b 1 if not defined ES_TMPDIR ( - for /f "tokens=* usebackq" %%a in (`"%JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.TempDirectory""`) do set ES_TMPDIR=%%a + for /f "tokens=* usebackq" %%a in (`CALL %JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.TempDirectory"`) do set ES_TMPDIR=%%a ) diff --git a/distribution/src/bin/elasticsearch.bat b/distribution/src/bin/elasticsearch.bat index 7df6f19fc0765..ecbbad826e797 100644 --- a/distribution/src/bin/elasticsearch.bat +++ b/distribution/src/bin/elasticsearch.bat @@ -41,9 +41,9 @@ IF ERRORLEVEL 1 ( EXIT /B %ERRORLEVEL% ) -set "ES_JVM_OPTIONS=%ES_PATH_CONF%\jvm.options" +set ES_JVM_OPTIONS=%ES_PATH_CONF%\jvm.options @setlocal -for /F "usebackq delims=" %%a in (`"%JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.JvmOptionsParser" "!ES_JVM_OPTIONS!" || echo jvm_options_parser_failed"`) do set JVM_OPTIONS=%%a +for /F "usebackq delims=" %%a in (`CALL %JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.JvmOptionsParser" "!ES_JVM_OPTIONS!" 
^|^| echo jvm_options_parser_failed`) do set JVM_OPTIONS=%%a
 @endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS%

 if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" (
diff --git a/docs/java-api/docs/update.asciidoc b/docs/java-api/docs/update.asciidoc
index 1c2211be9ba13..0935c9f11eca4 100644
--- a/docs/java-api/docs/update.asciidoc
+++ b/docs/java-api/docs/update.asciidoc
@@ -22,7 +22,9 @@ Or you can use `prepareUpdate()` method:
[source,java]
--------------------------------------------------
client.prepareUpdate("ttl", "doc", "1")
-        .setScript(new Script("ctx._source.gender = \"male\"" <1> , ScriptService.ScriptType.INLINE, null, null))
+        .setScript(new Script(
+            "ctx._source.gender = \"male\"", <1>
+            ScriptService.ScriptType.INLINE, null, null))
         .get();

client.prepareUpdate("ttl", "doc", "1")
diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc
index e5eb2a6b02062..d85ad8350e9cc 100644
--- a/docs/java-api/index.asciidoc
+++ b/docs/java-api/index.asciidoc
@@ -1,8 +1,8 @@
-[[java-api]]
 = Java API

 include::../Versions.asciidoc[]

+[[java-api]]
 [preface]
 == Preface
diff --git a/docs/painless/painless-casting.asciidoc b/docs/painless/painless-casting.asciidoc
index 4bcd14cbfc6a1..b742fbd115591 100644
--- a/docs/painless/painless-casting.asciidoc
+++ b/docs/painless/painless-casting.asciidoc
@@ -338,6 +338,28 @@ Use the cast operator to convert a <> value into a
     explicit cast `String "s"` to `char s` -> `char s`;
     store `char s` to `c`

+[[character-string-casting]]
+==== Character to String Casting
+
+Use the cast operator to convert a <> value into a
+<> value.
+
+*Examples*
+
+* Casting a `char` type value into a `String` reference.
++
+[source,Painless]
+----
+<1> char c = 65;
+<2> String s = (String)c;
+----
+<1> declare `char c`;
+    store `char 65` to `c`;
+<2> declare `String s`;
+    load from `c` -> `char A`;
+    explicit cast `char A` to `String "A"` -> `String "A"`;
+    store `String "A"` to `s`
+
 [[boxing-unboxing]]
 ==== Boxing and Unboxing
@@ -464,61 +486,51 @@ based on the type the `def` value represents.

 The following tables show all allowed casts. Read the tables row by row, where
 the original type is shown in the first column, and each subsequent column
-indicates whether a cast to the specified target type is implicit (I), explicit
-(E), or is not allowed (-).
+indicates whether a cast to the specified target type is implicit (I),
+explicit (E), boxed/unboxed for methods only (A), a reference type cast (@),
+or is not allowed (-). See <>
+for allowed reference type casts.
*Primitive/Reference Types* -[cols="<3,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1"] +[cols="<3,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1"] |==== -| | o | b | s | c | i | j | f | d | O | B | S | C | I | L | F | D | T | R | def -| boolean ( o ) | | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | I -| byte ( b ) | - | | I | I | I | I | I | I | - | - | - | - | - | - | - | - | - | - | I -| short ( s ) | - | E | | E | I | I | I | I | - | - | - | - | - | - | - | - | - | - | I -| char ( c ) | - | E | E | | I | I | I | I | - | - | - | - | - | - | - | - | E | - | I -| int ( i ) | - | E | E | E | | I | I | I | - | - | - | - | - | - | - | - | - | - | I -| long ( j ) | - | E | E | E | E | | I | I | - | - | - | - | - | - | - | - | - | - | I -| float ( f ) | - | E | E | E | E | E | | I | - | - | - | - | - | - | - | - | - | - | I -| double ( d ) | - | E | E | E | E | E | E | | - | - | - | - | - | - | - | - | - | - | I -| Boolean ( O ) | - | - | - | - | - | - | - | - | - | - | - | | - | - | - | - | - | - | I -| Byte ( B ) | - | - | - | - | - | - | - | - | - | | - | - | - | - | - | - | - | - | I -| Short ( S ) | - | - | - | - | - | - | - | - | - | - | | - | - | - | - | - | - | - | I -| Character ( C ) | - | - | - | - | - | - | - | - | - | - | - | | - | - | - | - | - | - | I -| Integer ( I ) | - | - | - | - | - | - | - | - | - | - | - | - | | - | - | - | - | - | I -| Long ( L ) | - | - | - | - | - | - | - | - | - | - | - | - | - | | - | - | - | - | I -| Float ( F ) | - | - | - | - | - | - | - | - | - | - | - | - | - | - | | - | - | - | I -| Double ( D ) | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | | - | - | I -| String ( T ) | - | - | - | E | - | - | - | - | - | - | - | - | - | - | - | - | | - | I -| Reference ( R ) | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | @ | I +| | O | N | T | b | y | s | c | i | j | f | d | B | Y | S | C | I | J | F | D | R | def +| Object ( O ) | | @ | @ | - | - | - | - | - | - | - | - | @ | @ | @ | @ | @ | @ | @ | @ | @ | I +| Number ( N ) | I | | - | - | - | - | - | - | - | - | - | - | @ | @ | - | @ | @ | @ | @ | @ | I +| String ( T ) | I | - | | - | - | - | - | - | - | - | - | - | - | - | E | - | - | - | - | - | I +| boolean ( b ) | A | - | - | | - | - | - | - | - | - | - | A | - | - | - | - | - | - | - | - | I +| byte ( y ) | A | A | - | - | | I | E | I | I | I | I | - | A | A | - | A | A | A | A | - | I +| short ( s ) | A | A | - | - | E | | E | I | I | I | I | - | - | A | - | A | A | A | A | - | I +| char ( c ) | A | - | E | - | E | E | | I | I | I | I | - | - | - | A | A | A | A | A | - | I +| int ( i ) | A | A | - | - | E | E | E | | I | I | I | - | - | - | - | A | A | A | A | - | I +| long ( j ) | A | A | - | - | E | E | E | E | | I | I | - | - | - | - | - | A | A | A | - | I +| float ( f ) | A | A | - | - | E | E | E | E | E | | I | - | - | - | - | - | - | A | A | - | I +| double ( d ) | A | A | - | - | E | E | E | E | E | E | | - | - | - | - | - | - | - | A | - | I +| Boolean ( B ) | A | - | - | A | - | - | - | - | - | - | - | | - | - | - | - | - | - | - | @ | I +| Byte ( Y ) | A | I | - | - | A | A | - | A | A | A | A | - | | A | - | A | A | A | A | @ | I +| Short ( S ) | A | I | - | - | - | A | - | A | A | A | A | - | - | | - | A | A | A | A | @ | I +| Character ( C ) | A | - | - | - | - | - | A | A | A | A | A | - | - | - | | A | A | A | A | @ | I +| Integer ( I ) | A | - | - | - | - | - | - | A | A | A | A | - | - | - | - | | A | A | A | @ | I +| Long ( J ) | A | - | - | - 
| - | - | - | - | A | A | A | - | - | - | - | - | | A | A | @ | I +| Float ( F ) | A | - | - | - | - | - | - | - | - | A | A | - | - | - | - | - | - | | A | @ | I +| Double ( D ) | A | - | - | - | - | - | - | - | - | - | A | - | - | - | - | - | - | - | | @ | I +| Reference ( R ) | I | @ | @ | - | - | - | - | - | - | - | - | @ | @ | @ | @ | @ | @ | @ | @ | @ | I |==== -@ See <> for allowed reference - type casts. - *`def` Type* -[cols="<3,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1"] +[cols="<3,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1,^1"] |==== -| | o | b | s | c | i | j | f | d | O | B | S | C | I | L | F | D | T | R | def -| def as boolean | I | - | - | - | - | - | - | - | I | - | - | - | - | - | - | - | - | - | -| def as byte | - | I | I | I | I | I | I | I | - | I | I | I | I | I | I | I | - | - | -| def as short | - | E | I | E | I | I | I | I | - | E | I | E | I | I | I | I | - | - | -| def as char | - | E | E | I | I | I | I | I | - | E | E | I | I | I | I | I | E | - | -| def as int | - | E | E | E | I | I | I | I | - | E | E | E | I | I | I | I | - | - | -| def as long | - | E | E | E | E | I | I | I | - | E | E | E | E | I | I | I | - | - | -| def as float | - | E | E | E | E | E | I | I | - | E | E | E | E | E | I | I | - | - | -| def as double | - | E | E | E | E | E | E | I | - | E | E | E | E | E | E | I | - | - | -| def as Boolean | I | - | - | - | - | - | - | - | I | - | - | - | | - | - | - | - | - | -| def as Byte | - | I | I | I | I | I | I | I | - | I | I | I | I | I | I | I | - | - | -| def as Short | - | E | I | E | I | I | I | I | - | E | I | E | I | I | I | I | - | - | -| def as Character | - | E | E | I | I | I | I | I | - | E | E | I | I | I | I | I | - | - | -| def as Integer | - | E | E | E | I | I | I | I | - | E | E | E | I | I | I | I | - | - | -| def as Long | - | E | E | E | E | I | I | I | - | E | E | E | E | I | I | I | - | - | -| def as Float | - | E | E | E | E | E | I | I | - | E | E | E | E | E | I | I | - | - | -| def as Double | - | E | E | E | E | E | E | I | - | E | E | E | E | E | E | I | - | - | -| def as String | - | - | - | E | - | - | - | - | - | - | - | - | - | - | - | - | I | - | -| def as Reference | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | - | @ | +| | O | N | T | b | y | s | c | i | j | f | d | B | Y | S | C | I | J | F | D | R +| def as String | I | - | I | - | - | - | E | - | - | - | - | - | - | - | E | - | - | - | - | @ +| def as boolean/Boolean | I | - | - | I | - | - | - | - | - | - | - | I | - | - | - | - | - | - | - | @ +| def as byte/Byte | I | - | - | - | I | I | E | I | I | I | I | - | I | I | E | I | I | I | I | @ +| def as short/Short | I | - | - | - | E | I | E | I | I | I | I | - | E | I | E | I | I | I | I | @ +| def as char/Character | I | - | - | - | E | E | I | I | I | I | I | - | E | E | I | I | I | I | I | @ +| def as int/Integer | I | - | - | - | E | E | E | I | I | I | I | - | E | E | E | I | I | I | I | @ +| def as long/Long | I | - | - | - | E | E | E | E | I | I | I | - | E | E | E | E | I | I | I | @ +| def as float/Float | I | - | - | - | E | E | E | E | E | I | I | - | E | E | E | E | E | I | I | @ +| def as double/Double | I | - | - | - | E | E | E | E | E | E | I | - | E | E | E | E | E | E | I | @ +| def as Reference | @ | @ | @ | - | - | - | - | - | - | - | - | @ | @ | @ | @ | @ | @ | @ | @ | @ |==== - -@ See <> for allowed reference - type casts. 
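As a rough illustration of the notation in the tables above (Painless
conversion rules broadly mirror Java's), the following plain-Java sketch shows
an implicit widening cast (I), an explicit narrowing cast (E), boxing as it
applies to method arguments (A), and reference type casts (@):

[source,java]
--------------------------------------------------
public class CastNotationSketch {
    // Boxed parameter types; primitive arguments are boxed at the call site.
    static long sum(Long a, Long b) {
        return a + b;
    }

    public static void main(String[] args) {
        int i = 65;
        long l = i;            // I: implicit widening, int -> long
        int n = (int) l;       // E: explicit narrowing, long -> int
        long s = sum(3L, 4L);  // A: long arguments boxed to Long for the call
        Object o = "text";     // @: implicit reference cast, String -> Object
        String t = (String) o; // @: explicit reference cast, Object -> String
        System.out.println(l + " " + n + " " + s + " " + t);
    }
}
--------------------------------------------------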
diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index fa228abd74ac0..785eb77f2c65e 100644 --- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -69,10 +69,10 @@ PUT /test_index } }, "filter" : { - "my_stop": { - "type" : "stop", - "stopwords": ["bar"] - }, + "my_stop": { + "type" : "stop", + "stopwords": ["bar"] + }, "synonym_graph" : { "type" : "synonym_graph", "lenient": true, diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index 715abdde6331d..87c99f6f38683 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -58,10 +58,10 @@ PUT /test_index } }, "filter" : { - "my_stop": { - "type" : "stop", - "stopwords": ["bar"] - }, + "my_stop": { + "type" : "stop", + "stopwords": ["bar"] + }, "synonym" : { "type" : "synonym", "lenient": true, diff --git a/docs/reference/ccr/getting-started.asciidoc b/docs/reference/ccr/getting-started.asciidoc index a9cd918cf8004..24304fea7642a 100644 --- a/docs/reference/ccr/getting-started.asciidoc +++ b/docs/reference/ccr/getting-started.asciidoc @@ -161,13 +161,6 @@ image::images/remote-clusters.jpg["The Remote Clusters page in {kib}"] [[ccr-getting-started-leader-index]] === Creating a leader index -Leader indices require a special index setting to ensure that the operations -that need to be replicated are available when the follower requests them from -the leader. This setting is used to control how many soft deletes are retained. -A _soft delete_ occurs whenever a document is deleted or updated. Soft deletes -can be enabled only on new indices created on or after {es} 6.5.0, and enabled -by default on new indices created on or after {es} 7.0.0. - In the following example, we will create a leader index in the remote cluster: [source,js] @@ -177,12 +170,7 @@ PUT /server-metrics "settings" : { "index" : { "number_of_shards" : 1, - "number_of_replicas" : 0, - "soft_deletes" : { - "retention" : { - "operations" : 1024 <1> - } - } + "number_of_replicas" : 0 } }, "mappings" : { @@ -214,7 +202,6 @@ PUT /server-metrics -------------------------------------------------- // CONSOLE // TEST[continued] -<1> Sets that up to 1024 soft deletes will be retained. [float] [[ccr-getting-started-follower-index]] diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 164c853271711..1c8b989779d6c 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -36,12 +36,24 @@ number of workers is. This can be tested by progressively increasing the number of workers until either I/O or CPU is saturated on the cluster. [float] -=== Increase the refresh interval - -The default <> is `1s`, which -forces Elasticsearch to create a new segment every second. -Increasing this value (to say, `30s`) will allow larger segments to flush and -decreases future merge pressure. +=== Unset or increase the refresh interval + +The operation that consists of making changes visible to search - called a +<> - is costly, and calling it often while there is +ongoing indexing activity can hurt indexing speed. 
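For reference, the same refresh operation can also be triggered on demand, and
each call pays the cost just described. A minimal sketch with the high-level
REST client (the client setup and the index name `my-index` are illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;

public class RefreshSketch {
    // Forces a refresh so documents indexed so far become visible to search.
    // Avoid calling this in a tight loop while bulk indexing is in progress.
    static void refreshNow(RestHighLevelClient client) throws java.io.IOException {
        client.indices().refresh(new RefreshRequest("my-index"), RequestOptions.DEFAULT);
    }
}
--------------------------------------------------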
+ +By default, Elasticsearch runs this operation every second, but only on +indices that have received one search request or more in the last 30 seconds. +This is the optimal configuration if you have no or very little search traffic +(e.g. less than one search request every 5 minutes) and want to optimize for +indexing speed. + +On the other hand, if your index experiences regular search requests, this +default behavior means that Elasticsearch will refresh your index every 1 +second. If you can afford to increase the amount of time between when a document +gets indexed and when it becomes visible, increasing the +<> to a larger value, e.g. +`30s`, might help improve indexing speed. [float] === Disable refresh and replicas for initial loads diff --git a/docs/reference/query-dsl/multi-match-query.asciidoc b/docs/reference/query-dsl/multi-match-query.asciidoc index b8fbb61a950d0..703abd42ee621 100644 --- a/docs/reference/query-dsl/multi-match-query.asciidoc +++ b/docs/reference/query-dsl/multi-match-query.asciidoc @@ -85,11 +85,11 @@ parameter, which can be set to: were one big field. Looks for each word in *any* field. See <>. -`phrase`:: Runs a `match_phrase` query on each field and uses the `_score` +`phrase`:: Runs a `match_phrase` query on each field and uses the `_score` from the best field. See <>. -`phrase_prefix`:: Runs a `match_phrase_prefix` query on each field and - combines the `_score` from each field. See <>. +`phrase_prefix`:: Runs a `match_phrase_prefix` query on each field and uses + the `_score` from the best field. See <>. `bool_prefix`:: Creates a `match_bool_prefix` query on each field and combines the `_score` from each field. See diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc index bad70a4359afe..c460ac51812b2 100644 --- a/docs/reference/settings/monitoring-settings.asciidoc +++ b/docs/reference/settings/monitoring-settings.asciidoc @@ -70,11 +70,11 @@ to pass through this cluster. `xpack.monitoring.collection.cluster.stats.timeout`:: -Sets the timeout for collecting the cluster statistics. Defaults to `10s`. +(<>) Timeout for collecting the cluster statistics. Defaults to `10s`. `xpack.monitoring.collection.node.stats.timeout`:: -Sets the timeout for collecting the node statistics. Defaults to `10s`. +(<>) Timeout for collecting the node statistics. Defaults to `10s`. `xpack.monitoring.collection.indices` (<>):: @@ -87,7 +87,7 @@ ensure monitoring of system indices. For example `.*,test*,-test3` `xpack.monitoring.collection.index.stats.timeout`:: -Sets the timeout for collecting index statistics. Defaults to `10s`. +(<>) Timeout for collecting index statistics. Defaults to `10s`. `xpack.monitoring.collection.index.recovery.active_only`:: @@ -96,11 +96,11 @@ collect only active recoveries. Defaults to `false`. `xpack.monitoring.collection.index.recovery.timeout`:: -Sets the timeout for collecting the recovery information. Defaults to `10s`. +(<>) Timeout for collecting the recovery information. Defaults to `10s`. `xpack.monitoring.history.duration`:: -Sets the retention duration beyond which the indices created by a Monitoring +(<>) Retention duration beyond which the indices created by a Monitoring exporter are automatically deleted. Defaults to `7d` (7 days). + -- @@ -206,12 +206,12 @@ The password for the `auth.username`. 
`connection.timeout`::

-The amount of time that the HTTP connection is supposed to wait for a socket to open for the
+(<>) Amount of time that the HTTP connection is supposed to wait for a socket to open for the
request. The default value is `6s`.

`connection.read_timeout`::

-The amount of time that the HTTP connection is supposed to wait for a socket to
+(<>) Amount of time that the HTTP connection is supposed to wait for a socket to
send back a response. The default value is `10 * connection.timeout` (`60s` if neither are set).

`ssl`::
diff --git a/docs/reference/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc
index 37f3d59ef6410..7b1169d34d323 100644
--- a/docs/reference/sql/endpoints/jdbc.asciidoc
+++ b/docs/reference/sql/endpoints/jdbc.asciidoc
@@ -113,6 +113,8 @@ Query timeout (in seconds). That is the maximum amount of time waiting for a que
`ssl.truststore.pass`:: trust store password

+`ssl.truststore.type` (default `JKS`):: trust store type. `PKCS12` is a common, alternative format
+
`ssl.protocol`(default `TLS`):: SSL protocol to be used

[float]
diff --git a/docs/reference/sql/functions/grouping.asciidoc b/docs/reference/sql/functions/grouping.asciidoc
index d3a57f0d3a81a..742f072dbd039 100644
--- a/docs/reference/sql/functions/grouping.asciidoc
+++ b/docs/reference/sql/functions/grouping.asciidoc
@@ -80,3 +80,7 @@ When the histogram in SQL is applied on **DATE** type instead of **DATETIME**, t
the multiple of a day. E.g.: for `HISTOGRAM(CAST(birth_date AS DATE), INTERVAL '2 3:04' DAY TO MINUTE)` the interval
actually used will be `INTERVAL '2' DAY`. If the interval specified is less than 1 day, e.g.:
`HISTOGRAM(CAST(birth_date AS DATE), INTERVAL '20' HOUR)` then the interval used will be `INTERVAL '1' DAY`.
+
+[IMPORTANT]
+Histogram in SQL cannot be applied on **TIME** type.
+E.g.: `HISTOGRAM(CAST(birth_date AS TIME), INTERVAL '10' MINUTES)` is currently not supported.
diff --git a/docs/reference/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc
index 6e966403ce0e9..931c28fd3f6dd 100644
--- a/docs/reference/sql/functions/index.asciidoc
+++ b/docs/reference/sql/functions/index.asciidoc
@@ -6,7 +6,27 @@
{es-sql} provides a comprehensive set of built-in operators and functions:

* <>
-* <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+** <>
+* <>
+** <>
+** <>
+* <>
** <>
** <>
** <>
@@ -24,9 +44,10 @@
** <>
** <>
** <>
-* <>
+* <>
** <>
-* <>
+* <>
+* <>
** <>
** <>
** <>
@@ -47,11 +68,11 @@
** <>
** <>
** <>
-* <>
+* <>
** <>
** <>
** <>
-* <>
+* <>
** <>
** <>
** <>
@@ -80,7 +101,7 @@
** <>
** <>
** <>
-* <>
+* <>
** <>
** <>
** <>
@@ -101,10 +122,10 @@
** <>
** <>
** <>
-* <>
+* <>
** <>
** <>
-* <>
+* <>
** <>
** <>
** <>
@@ -112,11 +133,12 @@
** <>
** <>
** <>
-* <>
+* <>
** <>
** <>

include::operators.asciidoc[]
+include::like-rlike.asciidoc[]
include::aggs.asciidoc[]
include::grouping.asciidoc[]
include::date-time.asciidoc[]
diff --git a/docs/reference/sql/functions/like-rlike.asciidoc b/docs/reference/sql/functions/like-rlike.asciidoc
new file mode 100644
index 0000000000000..c38f62ae7d7c1
--- /dev/null
+++ b/docs/reference/sql/functions/like-rlike.asciidoc
@@ -0,0 +1,102 @@
+[role="xpack"]
+[testenv="basic"]
+[[sql-like-rlike-operators]]
+=== LIKE and RLIKE Operators
+
+`LIKE` and `RLIKE` operators are commonly used to filter data based on string patterns.
They usually act on a field placed on the left-hand side of +the operator, but can also act on a constant (literal) expression. The right-hand side of the operator represents the pattern. +Both can be used in the `WHERE` clause of the `SELECT` statement, but `LIKE` can also be used in other places, such as defining an +<> or across various <>. +This section covers only the `SELECT ... WHERE ...` usage. + +NOTE: One significant difference between `LIKE`/`RLIKE` and the <> is that the former +act on <> while the latter also work on <> fields. If the field used with `LIKE`/`RLIKE` doesn't +have an exact not-normalized sub-field (of <> type) {es-sql} will not be able to run the query. If the field is either exact +or has an exact sub-field, it will use it as is, or it will automatically use the exact sub-field even if it wasn't explicitly specified in the statement. + +[[sql-like-operator]] +==== `LIKE` + +.Synopsis: +[source, sql] +-------------------------------------------------- +expression<1> LIKE constant_exp<2> +-------------------------------------------------- + +<1> typically a field, or a constant expression +<2> pattern + +.Description: + +The SQL `LIKE` operator is used to compare a value to similar values using wildcard operators. There are two wildcards used in conjunction +with the `LIKE` operator: + +* The percent sign (%) +* The underscore (_) + +The percent sign represents zero, one or multiple characters. The underscore represents a single number or character. These symbols can be +used in combinations. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs/docs.csv-spec[simpleLike] +---- + +There is, also, the possibility of using an escape character if one needs to match the wildcard characters themselves. This can be done +by using the `ESCAPE [escape_character]` statement after the `LIKE ...` operator: + + SELECT name, author FROM library WHERE name LIKE 'Dune/%' ESCAPE '/'; + +In the example above `/` is defined as an escape character which needs to be placed before the `%` or `_` characters if one needs to +match those characters in the pattern specifically. By default, there is no escape character defined. + +IMPORTANT: Even though `LIKE` is a valid option when searching or filtering in {es-sql}, full-text search predicates +`MATCH` and `QUERY` are <>. + +[[sql-rlike-operator]] +==== `RLIKE` + +.Synopsis: +[source, sql] +-------------------------------------------------- +expression<1> RLIKE constant_exp<2> +-------------------------------------------------- + +<1> typically a field, or a constant expression +<2> pattern + +.Description: + +This operator is similar to `LIKE`, but the user is not limited to search for a string based on a fixed pattern with the percent sign (`%`) +and underscore (`_`); the pattern in this case is a regular expression which allows the construction of more flexible patterns. + +For more details about the regular expressions syntax, https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/regex/Pattern.html[Java's Pattern class javadoc] +is a good starting point. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs/docs.csv-spec[simpleRLike] +---- + +IMPORTANT: Even though `RLIKE` is a valid option when searching or filtering in {es-sql}, full-text search predicates +`MATCH` and `QUERY` are <>. 
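Connecting this to the JDBC options documented earlier, the same `LIKE`
pattern (including the `ESCAPE` form above) can be issued through the {es-sql}
JDBC driver. A minimal sketch, where the URL, truststore path, password, and
the `library` index are all illustrative and the driver jar is assumed to be
on the classpath:

[source,java]
--------------------------------------------------
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class LikeQuerySketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Truststore settings from the JDBC section above; PKCS12 relies on
        // the newly documented ssl.truststore.type property.
        props.setProperty("ssl.truststore.location", "/path/to/truststore.p12");
        props.setProperty("ssl.truststore.pass", "password");
        props.setProperty("ssl.truststore.type", "PKCS12");
        try (Connection con =
                 DriverManager.getConnection("jdbc:es://https://localhost:9200", props);
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery(
                 "SELECT name, author FROM library WHERE name LIKE 'Dune/%' ESCAPE '/'")) {
            while (rs.next()) {
                System.out.println(rs.getString("name") + " by " + rs.getString("author"));
            }
        }
    }
}
--------------------------------------------------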
+
+[[sql-like-prefer-full-text]]
+==== Prefer full-text search predicates
+
+When using `LIKE`/`RLIKE`, do consider using <> which are faster, much more powerful
+and offer the option of sorting by relevancy (results can be returned based on how well they matched).
+
+For example:
+
+[example comparing a `LIKE` query with an equivalent full-text predicate omitted]
diff --git a/docs/reference/sql/functions/operators.asciidoc b/docs/reference/sql/functions/operators.asciidoc
+[[sql-operators-null-safe-equality]]
+==== `Null safe Equality (<=>)`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs/docs.csv-spec[nullEqualsCompareWithNull]
@@ -24,35 +26,40 @@
include-tagged::{sql-specs}/docs/docs.csv-spec[nullEqualsCompareTwoNulls]
--------------------------------------------------

-* Inequality (`<>` or `!=`)
+[[sql-operators-inequality]]
+==== `Inequality (<> or !=)`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereFieldNonEquality]
--------------------------------------------------

-* Comparison (`<`, `<=`, `>`, `>=`)
+[[sql-operators-comparison]]
+==== `Comparison (<, <=, >, >=)`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereFieldLessThan]
--------------------------------------------------

-* `BETWEEN`
+[[sql-operators-between]]
+==== `BETWEEN`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereBetween]
--------------------------------------------------

-* `IS NULL/IS NOT NULL`
+[[sql-operators-is-null]]
+==== `IS NULL/IS NOT NULL`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull]
--------------------------------------------------

-* `IN (<value1>, <value2> ...)`
+[[sql-operators-in]]
+==== `IN (<value1>, <value2> ...)`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereWithInAndMultipleValues]
--------------------------------------------------
@@ -64,21 +71,24 @@

Boolean operator for evaluating one or two expressions.

-* `AND`
+[[sql-operators-and]]
+==== `AND`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereFieldAndComparison]
--------------------------------------------------

-* `OR`
+[[sql-operators-or]]
+==== `OR`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereFieldOrComparison]
--------------------------------------------------

-* `NOT`
+[[sql-operators-not]]
+==== `NOT`

["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/filter.sql-spec[whereFieldEqualityNot]
--------------------------------------------------
@@ -91,42 +101,48 @@

Perform mathematical operations affecting one or two values.
The result is a value of numeric type.
-* Add (`+`) +[[sql-operators-plus]] +==== `Add (+)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[plus] -------------------------------------------------- -* Subtract (infix `-`) +[[sql-operators-subtract]] +==== `Subtract (infix -)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[minus] -------------------------------------------------- -* Negate (unary `-`) +[[sql-operators-negate]] +==== `Negate (unary -)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[unaryMinus] -------------------------------------------------- -* Multiply (`*`) +[[sql-operators-multiply]] +==== `Multiply (*)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[multiply] -------------------------------------------------- -* Divide (`/`) +[[sql-operators-divide]] +==== `Divide (/)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[divide] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Modulo_operation[Modulo] or Remainder(`%`) +[[sql-operators-remainder]] +==== `Modulo or Remainder(%)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -136,7 +152,8 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[mod] [[sql-operators-cast]] === Cast Operators -* Cast (`::`) +[[sql-operators-cast-cast]] +==== `Cast (::)` `::` provides an alternative syntax to the <> function. diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc index 42e5c842a4187..6c2304993c9d2 100644 --- a/docs/reference/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -44,9 +44,10 @@ s|SQL precision Most of {es} <> are available in {es-sql}, as indicated above. As one can see, all of {es} <> are mapped to the data type with the same name in {es-sql}, with the exception of **date** data type which is mapped to **datetime** in {es-sql}. -This is to avoid confusion with the ANSI SQL **DATE** (date only) type, which is also supported by {es-sql} -in queries (with the use of <>/<>), -but doesn't correspond to an actual mapping in {es} (see the <> below). +This is to avoid confusion with the ANSI SQL types **DATE** (date only) and **TIME** (time only), which are also +supported by {es-sql} in queries (with the use of +<>/<>), but don't correspond to an +actual mapping in {es} (see the <> below). Obviously, not all types in {es} have an equivalent in SQL and vice-versa hence why, {es-sql} uses the data type _particularities_ of the former over the latter as ultimately {es} is the backing store. @@ -66,6 +67,7 @@ s|SQL precision | date | 24 +| time | 18 | interval_year | 7 | interval_month | 7 | interval_day | 23 diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc index e8c99901e27c1..e2a538cd08571 100644 --- a/docs/reference/sql/limitations.asciidoc +++ b/docs/reference/sql/limitations.asciidoc @@ -113,3 +113,28 @@ FROM (SELECT ...) WHERE [simple_condition]`, this is currently **un-supported**. 
Using `FIRST` and `LAST` in the `HAVING` clause is not supported. The same applies to
<> and <> when their target column
is of type <> as they are internally translated to `FIRST` and `LAST`.
+
+[float]
+=== Using TIME data type in GROUP BY or <>
+
+Using `TIME` data type as a grouping key is currently not supported. For example:
+
+[source, sql]
+-------------------------------------------------------------
+SELECT count(*) FROM test GROUP BY CAST(date_created AS TIME);
+-------------------------------------------------------------
+
+On the other hand, it can still be used if it's wrapped with a scalar function that returns another data type,
+for example:
+
+[source, sql]
+-------------------------------------------------------------
+SELECT count(*) FROM test GROUP BY MINUTE(CAST(date_created AS TIME));
+-------------------------------------------------------------
+
+`TIME` data type is also currently not supported in the histogram grouping function. For example:
+
+[source, sql]
+-------------------------------------------------------------
+SELECT HISTOGRAM(CAST(birth_date AS TIME), INTERVAL '10' MINUTES) as h, COUNT(*) FROM t GROUP BY h
+-------------------------------------------------------------
diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle
index f2dec9cd7b7fa..7b0f4a83e91b5 100644
--- a/modules/ingest-geoip/build.gradle
+++ b/modules/ingest-geoip/build.gradle
@@ -41,9 +41,6 @@ task copyDefaultGeoIp2DatabaseFiles(type: Copy) {

project.bundlePlugin.dependsOn(copyDefaultGeoIp2DatabaseFiles)

-compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked,-serial"
-compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked"
-
bundlePlugin {
  from("${project.buildDir}/ingest-geoip") {
    into '/'
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java
index 8769a643e1d3c..a170b4417739b 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java
@@ -171,14 +171,14 @@ public void close() throws IOException {
     * reduction of CPU usage.
*/ static class GeoIpCache { - private final Cache cache; + private final Cache<CacheKey<?>, AbstractResponse> cache; //package private for testing GeoIpCache(long maxSize) { if (maxSize < 0) { throw new IllegalArgumentException("geoip max cache size must be 0 or greater"); } - this.cache = CacheBuilder.builder().setMaximumWeight(maxSize).build(); + this.cache = CacheBuilder.<CacheKey<?>, AbstractResponse>builder().setMaximumWeight(maxSize).build(); } <T extends AbstractResponse> T putIfAbsent(InetAddress ip, Class<T> responseType, diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java index d20be74798066..2423de5fd704a 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -38,6 +38,10 @@ import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.util.Arrays; +import java.util.stream.Collectors; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; @@ -54,8 +58,8 @@ private RemoteRequestBuilders() {} static Request initialSearch(SearchRequest searchRequest, BytesReference query, Version remoteVersion) { // It is nasty to build paths with StringBuilder but we'll be careful.... StringBuilder path = new StringBuilder("/"); - addIndexesOrTypes(path, "Index", searchRequest.indices()); - addIndexesOrTypes(path, "Type", searchRequest.types()); + addIndices(path, searchRequest.indices()); + addTypes(path, searchRequest.types()); path.append("_search"); Request request = new Request("POST", path.toString()); @@ -158,14 +162,34 @@ static Request initialSearch(SearchRequest searchRequest, BytesReference query, return request; } - private static void addIndexesOrTypes(StringBuilder path, String name, String[] indicesOrTypes) { - if (indicesOrTypes == null || indicesOrTypes.length == 0) { + private static void addIndices(StringBuilder path, String[] indices) { + if (indices == null || indices.length == 0) { return; } - for (String indexOrType : indicesOrTypes) { - checkIndexOrType(name, indexOrType); + + path.append(Arrays.stream(indices).map(RemoteRequestBuilders::encodeIndex).collect(Collectors.joining(","))).append('/'); + } + + private static String encodeIndex(String s) { + if (s.contains("%")) { // already encoded, pass-through to allow this in mixed version clusters + checkIndexOrType("Index", s); + return s; + } + try { + return URLEncoder.encode(s, "utf-8"); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + } + + private static void addTypes(StringBuilder path, String[] types) { + if (types == null || types.length == 0) { + return; + } + for (String indexOrType : types) { + checkIndexOrType("Type", indexOrType); } - path.append(Strings.arrayToCommaDelimitedString(indicesOrTypes)).append('/'); + path.append(Strings.arrayToCommaDelimitedString(types)).append('/'); } private static void checkIndexOrType(String name, String indexOrType) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java index 0f985fd37016a..eb6192a043160 100644 ---
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -68,19 +68,26 @@ public void testIntialSearchPath() { searchRequest.indices("a", "b"); searchRequest.types("c", "d"); assertEquals("/a,b/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); - searchRequest.indices("cat,"); - expectBadStartRequest(searchRequest, "Index", ",", "cat,"); - searchRequest.indices("cat,", "dog"); - expectBadStartRequest(searchRequest, "Index", ",", "cat,"); - searchRequest.indices("dog", "cat,"); - expectBadStartRequest(searchRequest, "Index", ",", "cat,"); + assertEquals("/cat%2C/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); searchRequest.indices("cat/"); - expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + assertEquals("/cat%2F/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); searchRequest.indices("cat/", "dog"); - expectBadStartRequest(searchRequest, "Index", "/", "cat/"); - searchRequest.indices("dog", "cat/"); - expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + assertEquals("/cat%2F,dog/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + // test a specific date math + all characters that need escaping. + searchRequest.indices("<cat{now/d}>", "<>/{}|+:,"); + assertEquals("/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/c,d/_search", + initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + + // pass-through if already escaped. + searchRequest.indices("%2f", "%3a"); + assertEquals("/%2f,%3a/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint()); + + // do not allow , and / if already escaped. + searchRequest.indices("%2fcat,"); + expectBadStartRequest(searchRequest, "Index", ",", "%2fcat,"); + searchRequest.indices("%3ccat/"); + expectBadStartRequest(searchRequest, "Index", "/", "%3ccat/"); searchRequest.indices("ok"); searchRequest.types("cat,"); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index fc3f80b5b32a2..f98382e5526be 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -23,6 +23,7 @@ import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; +import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.ObjectMetadata; @@ -56,6 +57,12 @@ class S3BlobContainer extends AbstractBlobContainer { + /** + * Maximum number of deletes in a {@link DeleteObjectsRequest}. + * @see <a href="https://docs.aws.amazon.com/AmazonS3/latest/API/multiobjectdeleteapi.html">S3 Documentation</a>.
+ */ + private static final int MAX_BULK_DELETES = 1000; + private final S3BlobStore blobStore; private final String keyPath; @@ -118,6 +125,51 @@ public void deleteBlob(String blobName) throws IOException { deleteBlobIgnoringIfNotExists(blobName); } + @Override + public void deleteBlobsIgnoringIfNotExists(List<String> blobNames) throws IOException { + if (blobNames.isEmpty()) { + return; + } + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + // S3 API only allows 1k blobs per delete so we split up the given blobs into requests of max. 1k deletes + final List<DeleteObjectsRequest> deleteRequests = new ArrayList<>(); + final List<String> partition = new ArrayList<>(); + for (String blob : blobNames) { + partition.add(buildKey(blob)); + if (partition.size() == MAX_BULK_DELETES) { + deleteRequests.add(bulkDelete(blobStore.bucket(), partition)); + partition.clear(); + } + } + if (partition.isEmpty() == false) { + deleteRequests.add(bulkDelete(blobStore.bucket(), partition)); + } + SocketAccess.doPrivilegedVoid(() -> { + AmazonClientException aex = null; + for (DeleteObjectsRequest deleteRequest : deleteRequests) { + try { + clientReference.client().deleteObjects(deleteRequest); + } catch (AmazonClientException e) { + if (aex == null) { + aex = e; + } else { + aex.addSuppressed(e); + } + } + } + if (aex != null) { + throw aex; + } + }); + } catch (final AmazonClientException e) { + throw new IOException("Exception when deleting blobs [" + blobNames + "]", e); + } + } + + private static DeleteObjectsRequest bulkDelete(String bucket, List<String> blobs) { + return new DeleteObjectsRequest(bucket).withKeys(blobs.toArray(Strings.EMPTY_ARRAY)).withQuiet(true); + } + @Override public void deleteBlobIgnoringIfNotExists(String blobName) throws IOException { try (AmazonS3Reference clientReference = blobStore.clientReference()) { diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 522f15661bd64..f9d6ada5da38f 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -157,8 +157,6 @@ class S3Repository extends BlobStoreRepository { private final String cannedACL; - private final RepositoryMetaData repositoryMetaData; - /** * Constructs an s3 backed repository */ @@ -169,8 +167,6 @@ class S3Repository extends BlobStoreRepository { super(metadata, settings, namedXContentRegistry); this.service = service; - this.repositoryMetaData = metadata; - // Parse and validate the user's S3 Storage Class setting this.bucket = BUCKET_SETTING.get(metadata.settings()); if (bucket == null) { @@ -216,7 +212,7 @@ class S3Repository extends BlobStoreRepository { @Override protected S3BlobStore createBlobStore() { - return new S3BlobStore(service, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass, repositoryMetaData); + return new S3BlobStore(service, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass, metadata); } // only use for testing diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index a411a1c53cf36..51b1d5159edfe 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++
b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -324,7 +324,7 @@ private PathTrie<RequestHandler> defaultHandlers(final Map<String, Bucket> bucke // Delete Multiple Objects // // https://docs.aws.amazon.com/AmazonS3/latest/API/multiobjectdeleteapi.html - handlers.insert(nonAuthPath(HttpPost.METHOD_NAME, "/"), (request) -> { + final RequestHandler bulkDeleteHandler = request -> { final List<String> deletes = new ArrayList<>(); final List<String> errors = new ArrayList<>(); @@ -344,7 +344,6 @@ private PathTrie<RequestHandler> defaultHandlers(final Map<String, Bucket> bucke if (closingOffset != -1) { offset = offset + startMarker.length(); final String objectName = requestBody.substring(offset, closingOffset); - boolean found = false; for (Bucket bucket : buckets.values()) { if (bucket.objects.containsKey(objectName)) { @@ -369,7 +368,9 @@ private PathTrie<RequestHandler> defaultHandlers(final Map<String, Bucket> bucke } } return newInternalError(request.getId(), "Something is wrong with this POST multiple deletes request"); - }); + }; + handlers.insert(nonAuthPath(HttpPost.METHOD_NAME, "/"), bulkDeleteHandler); + handlers.insert(nonAuthPath(HttpPost.METHOD_NAME, "/{bucket}"), bulkDeleteHandler); // non-authorized requests diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java index 9e0a6009659dc..37f5d9b03dbc2 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java @@ -158,11 +158,7 @@ public DeleteObjectsResult deleteObjects(DeleteObjectsRequest request) throws Sd final List<DeleteObjectsResult.DeletedObject> deletions = new ArrayList<>(); for (DeleteObjectsRequest.KeyVersion key : request.getKeys()) { - if (blobs.remove(key.getKey()) == null) { AmazonS3Exception exception = new AmazonS3Exception("[" + key + "] does not exist."); exception.setStatusCode(404); throw exception; - } else { + if (blobs.remove(key.getKey()) != null) { DeleteObjectsResult.DeletedObject deletion = new DeleteObjectsResult.DeletedObject(); deletion.setKey(key.getKey()); deletions.add(deletion); diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index f8246ca6dcd24..1b2eb7064f04c 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -121,6 +121,7 @@ protected void doRun() throws Exception { return future; } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40731") public void testRecoveryWithConcurrentIndexing() throws Exception { final String index = "recovery_with_concurrent_indexing"; Response response = client().performRequest(new Request("GET", "_nodes")); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index af22861a90aa8..ba0484046adfd 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -20,21 +20,24 @@ package org.elasticsearch.packaging.test; import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.Archives; import org.elasticsearch.packaging.util.Distribution; +import org.elasticsearch.packaging.util.FileUtils; import org.elasticsearch.packaging.util.Installation; import org.elasticsearch.packaging.util.Platforms; import org.elasticsearch.packaging.util.ServerUtils; import org.elasticsearch.packaging.util.Shell; import org.elasticsearch.packaging.util.Shell.Result; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.stream.Stream; +import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER; import static org.elasticsearch.packaging.util.Archives.installArchive; import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation; @@ -49,6 +52,7 @@ import static org.elasticsearch.packaging.util.FileUtils.rm; import static org.elasticsearch.packaging.util.ServerUtils.makeRequest; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.notNullValue; @@ -62,12 +66,12 @@ @TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) public abstract class ArchiveTestCase extends PackagingTestCase { - public void test10Install() { + public void test10Install() throws Exception { installation = installArchive(distribution()); verifyArchiveInstallation(installation, distribution()); } - public void test20PluginsListWithNoPlugins() { + public void test20PluginsListWithNoPlugins() throws Exception { assumeThat(installation, is(notNullValue())); final Installation.Executables bin = installation.executables(); @@ -77,7 +81,7 @@ public void test20PluginsListWithNoPlugins() { assertThat(r.stdout, isEmptyString()); } - public void test30NoJava() { + public void test30NoJava() throws Exception { assumeThat(installation, is(notNullValue())); final Installation.Executables bin = installation.executables(); @@ -101,7 +105,7 @@ public void test30NoJava() { } } - public void test40CreateKeystoreManually() { + public void test40CreateKeystoreManually() throws Exception { assumeThat(installation, is(notNullValue())); final Installation.Executables bin = installation.executables(); @@ -134,7 +138,7 @@ public void test40CreateKeystoreManually() { }); } - public void test50StartAndStop() throws IOException { + public void test50StartAndStop() throws Exception { assumeThat(installation, is(notNullValue())); // cleanup from previous test @@ -152,7 +156,7 @@ public void test50StartAndStop() throws IOException { Archives.stopElasticsearch(installation); } - public void assertRunsWithJavaHome() throws IOException { + public void assertRunsWithJavaHome() throws Exception { Shell sh = newShell(); Platforms.onLinux(() -> { @@ -173,13 +177,13 @@ public void assertRunsWithJavaHome() throws IOException { assertThat(new String(Files.readAllBytes(log), StandardCharsets.UTF_8), containsString(systemJavaHome)); } - public void test51JavaHomeOverride() throws IOException { + public void test51JavaHomeOverride() throws Exception { assumeThat(installation, is(notNullValue())); assertRunsWithJavaHome(); } - public void test52BundledJdkRemoved() throws IOException { + public void test52BundledJdkRemoved() throws Exception { assumeThat(installation, is(notNullValue())); assumeThat(distribution().hasJdk, 
is(true)); @@ -192,7 +196,63 @@ public void test52BundledJdkRemoved() throws IOException { } } - public void test60AutoCreateKeystore() { + public void test53JavaHomeWithSpecialCharacters() throws Exception { + assumeThat(installation, is(notNullValue())); + + Platforms.onWindows(() -> { + final Shell sh = new Shell(); + try { + // once windows 2012 is no longer supported and powershell 5.0 is always available we can change this command + sh.run("cmd /c mklink /D 'C:\\Program Files (x86)\\java' $Env:JAVA_HOME"); + + sh.getEnv().put("JAVA_HOME", "C:\\Program Files (x86)\\java"); + + //verify ES can start, stop and run plugin list + Archives.runElasticsearch(installation, sh); + + Archives.stopElasticsearch(installation); + + String pluginListCommand = installation.bin + "/elasticsearch-plugin list"; + Result result = sh.run(pluginListCommand); + assertThat(result.exitCode, equalTo(0)); + + } finally { + //clean up sym link + sh.run("cmd /c del /F /Q 'C:\\Program Files (x86)\\java' "); + } + }); + + Platforms.onLinux(() -> { + final Shell sh = new Shell(); + // Create temporary directory with a space and link to java binary. + // Use it as java_home + String nameWithSpace = RandomStrings.randomAsciiAlphanumOfLength(getRandom(), 10) + "java home"; + String test_java_home = FileUtils.mkdir(Paths.get("/home",ARCHIVE_OWNER, nameWithSpace)).toAbsolutePath().toString(); + try { + final String systemJavaHome = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim(); + final String java = systemJavaHome + "/bin/java"; + + sh.run("mkdir -p \"" + test_java_home + "/bin\""); + sh.run("ln -s \"" + java + "\" \"" + test_java_home + "/bin/java\""); + sh.run("chown -R " + ARCHIVE_OWNER + ":" + ARCHIVE_OWNER + " \"" + test_java_home + "\""); + + sh.getEnv().put("JAVA_HOME", test_java_home); + + //verify ES can start, stop and run plugin list + Archives.runElasticsearch(installation, sh); + + Archives.stopElasticsearch(installation); + + String pluginListCommand = installation.bin + "/elasticsearch-plugin list"; + Result result = sh.run(pluginListCommand); + assertThat(result.exitCode, equalTo(0)); + } finally { + FileUtils.rm(Paths.get("\"" + test_java_home + "\"")); + } + }); + } + + public void test60AutoCreateKeystore() throws Exception { assumeThat(installation, is(notNullValue())); assertThat(installation.config("elasticsearch.keystore"), file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); @@ -211,7 +271,7 @@ public void test60AutoCreateKeystore() { }); } - public void test70CustomPathConfAndJvmOptions() throws IOException { + public void test70CustomPathConfAndJvmOptions() throws Exception { assumeThat(installation, is(notNullValue())); final Path tempConf = getTempDir().resolve("esconf-alternate"); @@ -260,7 +320,7 @@ public void test70CustomPathConfAndJvmOptions() throws IOException { } } - public void test80RelativePathConf() throws IOException { + public void test80RelativePathConf() throws Exception { assumeThat(installation, is(notNullValue())); final Path temp = getTempDir().resolve("esconf-alternate"); @@ -304,7 +364,7 @@ public void test80RelativePathConf() throws IOException { } } - public void test90SecurityCliPackaging() { + public void test90SecurityCliPackaging() throws Exception { assumeThat(installation, is(notNullValue())); final Installation.Executables bin = installation.executables(); @@ -328,7 +388,7 @@ public void test90SecurityCliPackaging() { } } - public void test91ElasticsearchShardCliPackaging() { + public void test91ElasticsearchShardCliPackaging() throws Exception { 
assumeThat(installation, is(notNullValue())); final Installation.Executables bin = installation.executables(); @@ -345,7 +405,7 @@ public void test91ElasticsearchShardCliPackaging() { } } - public void test92ElasticsearchNodeCliPackaging() { + public void test92ElasticsearchNodeCliPackaging() throws Exception { assumeThat(installation, is(notNullValue())); final Installation.Executables bin = installation.executables(); @@ -363,7 +423,7 @@ public void test92ElasticsearchNodeCliPackaging() { } } - public void test93ElasticsearchNodeCustomDataPathAndNotEsHomeWorkDir() throws IOException { + public void test93ElasticsearchNodeCustomDataPathAndNotEsHomeWorkDir() throws Exception { assumeThat(installation, is(notNullValue())); Path relativeDataPath = installation.data.relativize(installation.home); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java index 1b2b891da4513..12597ae8b4de2 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java @@ -26,7 +26,6 @@ import org.junit.Before; import org.junit.BeforeClass; -import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; @@ -54,7 +53,7 @@ public abstract class DebPreservationTestCase extends PackagingTestCase { protected abstract Distribution distribution(); @BeforeClass - public static void cleanup() { + public static void cleanup() throws Exception { installation = null; cleanEverything(); } @@ -65,14 +64,14 @@ public void onlyCompatibleDistributions() { assumeTrue("only compatible distributions", distribution().packaging.compatible); } - public void test10Install() throws IOException { + public void test10Install() throws Exception { assertRemoved(distribution()); installation = install(distribution()); assertInstalled(distribution()); verifyPackageInstallation(installation, distribution(), newShell()); } - public void test20Remove() { + public void test20Remove() throws Exception { assumeThat(installation, is(notNullValue())); remove(distribution()); @@ -117,7 +116,7 @@ public void test20Remove() { assertTrue(Files.exists(installation.envFile)); } - public void test30Purge() { + public void test30Purge() throws Exception { assumeThat(installation, is(notNullValue())); final Shell sh = new Shell(); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java index 458359b299e75..c664e28931087 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java @@ -20,6 +20,7 @@ package org.elasticsearch.packaging.test; import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.http.client.fluent.Request; import org.elasticsearch.packaging.util.FileUtils; import org.elasticsearch.packaging.util.Shell; @@ -27,7 +28,6 @@ import org.hamcrest.CoreMatchers; import org.junit.Before; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -36,6 +36,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import static 
com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.elasticsearch.packaging.util.FileUtils.append; import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; import static org.elasticsearch.packaging.util.FileUtils.assertPathsExist; @@ -72,19 +73,19 @@ public abstract class PackageTestCase extends PackagingTestCase { private Shell sh; @Before - public void onlyCompatibleDistributions() { + public void onlyCompatibleDistributions() throws Exception { assumeTrue("only compatible distributions", distribution().packaging.compatible); sh = newShell(); } - public void test10InstallPackage() throws IOException { + public void test10InstallPackage() throws Exception { assertRemoved(distribution()); installation = install(distribution()); assertInstalled(distribution()); verifyPackageInstallation(installation, distribution(), sh); } - public void test20PluginsCommandWhenNoPlugins() { + public void test20PluginsCommandWhenNoPlugins() throws Exception { assumeThat(installation, is(notNullValue())); assertThat(sh.run(installation.bin("elasticsearch-plugin") + " list").stdout, isEmptyString()); @@ -104,7 +105,7 @@ public void test31InstallDoesNotStartServer() { assertThat(sh.run("ps aux").stdout, not(containsString("org.elasticsearch.bootstrap.Elasticsearch"))); } - public void assertRunsWithJavaHome() throws IOException { + public void assertRunsWithJavaHome() throws Exception { String systemJavaHome = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim(); byte[] originalEnvFile = Files.readAllBytes(installation.envFile); try { @@ -121,7 +122,7 @@ public void assertRunsWithJavaHome() throws IOException { assertThat(new String(Files.readAllBytes(log), StandardCharsets.UTF_8), containsString(systemJavaHome)); } - public void test32JavaHomeOverride() throws IOException { + public void test32JavaHomeOverride() throws Exception { assumeThat(installation, is(notNullValue())); // we always run with java home when no bundled jdk is included, so this test would be repetitive assumeThat(distribution().hasJdk, is(true)); @@ -129,7 +130,7 @@ public void test32JavaHomeOverride() throws IOException { assertRunsWithJavaHome(); } - public void test42BundledJdkRemoved() throws IOException { + public void test42BundledJdkRemoved() throws Exception { assumeThat(installation, is(notNullValue())); assumeThat(distribution().hasJdk, is(true)); @@ -142,7 +143,7 @@ public void test42BundledJdkRemoved() throws IOException { } } - public void test40StartServer() throws IOException { + public void test40StartServer() throws Exception { String start = sh.runIgnoreExitCode("date ").stdout.trim(); assumeThat(installation, is(notNullValue())); @@ -159,7 +160,7 @@ public void test40StartServer() throws IOException { verifyPackageInstallation(installation, distribution(), sh); // check startup script didn't change permissions } - public void test50Remove() { + public void test50Remove() throws Exception { assumeThat(installation, is(notNullValue())); remove(distribution()); @@ -209,7 +210,7 @@ public void test50Remove() { assertFalse(Files.exists(SYSTEMD_SERVICE)); } - public void test60Reinstall() throws IOException { + public void test60Reinstall() throws Exception { assumeThat(installation, is(notNullValue())); installation = install(distribution()); @@ -220,7 +221,7 @@ public void test60Reinstall() throws IOException { assertRemoved(distribution()); } - public void test70RestartServer() throws IOException { + public void test70RestartServer() throws Exception { try { installation = 
install(distribution()); assertInstalled(distribution()); @@ -235,7 +236,7 @@ public void test70RestartServer() throws IOException { } - public void test72TestRuntimeDirectory() throws IOException { + public void test72TestRuntimeDirectory() throws Exception { try { installation = install(distribution()); FileUtils.rm(installation.pidDir); @@ -247,7 +248,7 @@ public void test72TestRuntimeDirectory() throws IOException { } } - public void test73gcLogsExist() throws IOException { + public void test73gcLogsExist() throws Exception { installation = install(distribution()); startElasticsearch(sh); // it can be gc.log or gc.log.0.current @@ -264,7 +265,7 @@ public void test73gcLogsExist() throws IOException { * # but it should not block ES from starting * # see https://github.com/elastic/elasticsearch/issues/11594 */ - public void test80DeletePID_DIRandRestart() throws IOException { + public void test80DeletePID_DIRandRestart() throws Exception { assumeTrue(isSystemd()); rm(installation.pidDir); @@ -280,7 +281,7 @@ public void test80DeletePID_DIRandRestart() throws IOException { stopElasticsearch(sh); } - public void test81CustomPathConfAndJvmOptions() throws IOException { + public void test81CustomPathConfAndJvmOptions() throws Exception { assumeTrue(isSystemd()); assumeThat(installation, is(notNullValue())); @@ -291,8 +292,9 @@ public void test81CustomPathConfAndJvmOptions() throws IOException { // The custom config directory is not under /tmp or /var/tmp because // systemd's private temp directory functionally means different // processes can have different views of what's in these directories - String temp = sh.runIgnoreExitCode("mktemp -p /etc -d").stdout.trim(); - final Path tempConf = Paths.get(temp); + String randomName = RandomStrings.randomAsciiAlphanumOfLength(getRandom(), 10); + sh.run("mkdir /etc/"+randomName); + final Path tempConf = Paths.get("/etc/"+randomName); try { mkdir(tempConf); @@ -331,7 +333,7 @@ public void test81CustomPathConfAndJvmOptions() throws IOException { } } - public void test82SystemdMask() throws IOException { + public void test82SystemdMask() throws Exception { try { assumeTrue(isSystemd()); @@ -345,7 +347,7 @@ public void test82SystemdMask() throws IOException { } } - public void test83serviceFileSetsLimits() throws IOException { + public void test83serviceFileSetsLimits() throws Exception { // Limits are changed on systemd platforms only assumeTrue(isSystemd()); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java index 7cb860e617eb0..bd7738aeac4ac 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -64,7 +64,7 @@ public void setup() { protected static Installation installation; @BeforeClass - public static void cleanup() { + public static void cleanup() throws Exception { installation = null; cleanEverything(); } @@ -72,7 +72,7 @@ public static void cleanup() { /** The {@link Distribution} that should be tested in this case */ protected abstract Distribution distribution(); - protected Shell newShell() { + protected Shell newShell() throws Exception { Shell sh = new Shell(); if (distribution().hasJdk == false) { Platforms.onLinux(() -> { diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java 
b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java index 5cfc10b110afb..7b6ac039fc55c 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java @@ -26,7 +26,6 @@ import org.junit.Before; import org.junit.BeforeClass; -import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.stream.Stream; @@ -56,7 +55,7 @@ public abstract class RpmPreservationTestCase extends PackagingTestCase { protected abstract Distribution distribution(); @BeforeClass - public static void cleanup() { + public static void cleanup() throws Exception { installation = null; cleanEverything(); } @@ -67,14 +66,14 @@ public void onlyCompatibleDistributions() { assumeTrue("only compatible distributions", distribution().packaging.compatible); } - public void test10Install() throws IOException { + public void test10Install() throws Exception { assertRemoved(distribution()); installation = install(distribution()); assertInstalled(distribution()); verifyPackageInstallation(installation, distribution(), newShell()); } - public void test20Remove() { + public void test20Remove() throws Exception { assumeThat(installation, is(notNullValue())); remove(distribution()); @@ -89,7 +88,7 @@ public void test20Remove() { assertFalse(Files.exists(installation.envFile)); } - public void test30PreserveConfig() throws IOException { + public void test30PreserveConfig() throws Exception { final Shell sh = new Shell(); installation = install(distribution()); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/WindowsServiceTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/WindowsServiceTestCase.java index 08f54096e073d..57eaf13fe9e94 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/WindowsServiceTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/WindowsServiceTestCase.java @@ -102,7 +102,7 @@ private void assertExit(Result result, String script, int exitCode) { } } - public void test10InstallArchive() { + public void test10InstallArchive() throws Exception { installation = installArchive(distribution()); verifyArchiveInstallation(installation, distribution()); serviceScript = installation.bin("elasticsearch-service.bat").toString(); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java index c8ddda2dc4f37..e557b47fb8912 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java @@ -19,7 +19,6 @@ package org.elasticsearch.packaging.util; -import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -37,15 +36,14 @@ import static org.elasticsearch.packaging.util.FileUtils.getDefaultArchiveInstallPath; import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; import static org.elasticsearch.packaging.util.FileUtils.lsGlob; - import static org.elasticsearch.packaging.util.FileUtils.mv; import static org.elasticsearch.packaging.util.FileUtils.slurp; import static org.elasticsearch.packaging.util.Platforms.isDPKG; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.isEmptyOrNullString; -import static org.hamcrest.core.Is.is; -import static 
org.hamcrest.collection.IsEmptyCollection.empty; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.collection.IsEmptyCollection.empty; +import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; import static org.junit.Assert.assertTrue; @@ -59,11 +57,11 @@ public class Archives { ? "vagrant" : "elasticsearch"; - public static Installation installArchive(Distribution distribution) { + public static Installation installArchive(Distribution distribution) throws Exception { return installArchive(distribution, getDefaultArchiveInstallPath(), getCurrentVersion()); } - public static Installation installArchive(Distribution distribution, Path fullInstallPath, String version) { + public static Installation installArchive(Distribution distribution, Path fullInstallPath, String version) throws Exception { final Shell sh = new Shell(); final Path distributionFile = getDistributionFile(distribution); @@ -255,7 +253,7 @@ private static void verifyDefaultInstallation(Installation es, Distribution dist ).forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); } - public static void runElasticsearch(Installation installation, Shell sh) throws IOException { + public static void runElasticsearch(Installation installation, Shell sh) throws Exception { final Path pidFile = installation.home.resolve("elasticsearch.pid"); final Installation.Executables bin = installation.executables(); @@ -305,7 +303,7 @@ public static void runElasticsearch(Installation installation, Shell sh) throws Platforms.onWindows(() -> sh.run("Get-Process -Id " + pid)); } - public static void stopElasticsearch(Installation installation) { + public static void stopElasticsearch(Installation installation) throws Exception { Path pidFile = installation.home.resolve("elasticsearch.pid"); assertTrue(Files.exists(pidFile)); String pid = slurp(pidFile).trim(); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java index fda61e9fb36e5..f9b98d58ccacc 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java @@ -50,7 +50,7 @@ public class Cleanup { // todo private static final List ELASTICSEARCH_FILES_WINDOWS = Collections.emptyList(); - public static void cleanEverything() { + public static void cleanEverything() throws Exception { final Shell sh = new Shell(); // kill elasticsearch processes diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java index afa7e371c2c55..c5dcc34af882f 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java @@ -54,14 +54,14 @@ public class Packages { public static final Path SYSVINIT_SCRIPT = Paths.get("/etc/init.d/elasticsearch"); public static final Path SYSTEMD_SERVICE = Paths.get("/usr/lib/systemd/system/elasticsearch.service"); - public static void assertInstalled(Distribution distribution) { + public static void assertInstalled(Distribution distribution) throws Exception { final Result status = packageStatus(distribution); assertThat(status.exitCode, is(0)); Platforms.onDPKG(() -> assertFalse(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find())); } - public static void 
assertRemoved(Distribution distribution) { + public static void assertRemoved(Distribution distribution) throws Exception { final Result status = packageStatus(distribution); Platforms.onRPM(() -> assertThat(status.exitCode, is(1))); @@ -133,7 +133,7 @@ public static Result runInstallCommand(Distribution distribution, String version } } - public static void remove(Distribution distribution) { + public static void remove(Distribution distribution) throws Exception { final Shell sh = new Shell(); Platforms.onRPM(() -> { diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java index dbac9c88d26c9..6258c1336b2fc 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java @@ -65,25 +65,25 @@ public static boolean isSysVInit() { return new Shell().runIgnoreExitCode("which service").isSuccess(); } - public static void onWindows(PlatformAction action) { + public static void onWindows(PlatformAction action) throws Exception { if (WINDOWS) { action.run(); } } - public static void onLinux(PlatformAction action) { + public static void onLinux(PlatformAction action) throws Exception { if (LINUX) { action.run(); } } - public static void onRPM(PlatformAction action) { + public static void onRPM(PlatformAction action) throws Exception { if (isRPM()) { action.run(); } } - public static void onDPKG(PlatformAction action) { + public static void onDPKG(PlatformAction action) throws Exception { if (isDPKG()) { action.run(); } @@ -94,6 +94,6 @@ public static void onDPKG(PlatformAction action) { */ @FunctionalInterface public interface PlatformAction { - void run(); + void run() throws Exception; } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml index e8593c4c8d5ff..591cb057ede4c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml @@ -1,9 +1,7 @@ --- "Array of objects": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: bulk: @@ -29,9 +27,7 @@ --- "Empty _id": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: bulk: @@ -65,9 +61,7 @@ "empty action": - skip: - version: " - 6.99.99" features: headers - reason: types are required in requests before 7.0.0 - do: catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml index b23517f6a8f25..f99053cba42ec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml @@ -1,9 +1,7 @@ --- "List of strings": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: bulk: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml index 38706d133e44b..62cc5838a528d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml @@ -1,9 +1,7 @@ --- "One big string": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: bulk: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml index 5e783d60d3d46..fcdbfec55a472 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml @@ -1,9 +1,7 @@ --- "Source filtering": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: index: refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml index 77098779c0c4f..29ee7bbd11f3a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml @@ -1,9 +1,7 @@ --- "refresh=true immediately makes changes are visible in search": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: bulk: refresh: true @@ -21,9 +19,7 @@ --- "refresh=empty string immediately makes changes are visible in search": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: bulk: refresh: "" @@ -42,9 +38,7 @@ --- "refresh=wait_for waits until changes are visible in search": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: bulk: refresh: wait_for diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml index cad0891b21e52..50bf6ac5bcf3a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/70_mix_typeless_typeful.yml @@ -1,10 +1,6 @@ --- "bulk without types on an index that has types": - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - do: indices.create: # not using include_type_name: false on purpose include_type_name: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/81_cas_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/81_cas_with_types.yml index 101316e7bf504..7de82e4fb23e0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/81_cas_with_types.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/81_cas_with_types.yml @@ -1,10 +1,6 @@ --- "Compare And Swap Sequence Numbers": - - skip: - version: " - 6.6.99" - reason: cas operations with sequence numbers was added in 6.7 - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml index 1ce8468cb51f9..e91db3f434a92 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml @@ -1,8 +1,5 @@ --- "Test cat thread_pool output": - - skip: - version: " - 6.99.99" - reason: this API was changed in a backwards-incompatible fashion in 7.0.0 so we need to skip in a mixed cluster - do: cat.thread_pool: {} 
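The REST-test hunks above and below all delete the same kind of boilerplate: a `skip` block that kept a test from running against pre-7.0 nodes in a mixed-version cluster. Reassembled from the flattened hunks (indentation is representative and the `reason` text varies per file), the removed block has this shape:

[source, yaml]
-------------------------------------------------------------
- skip:
    version: " - 6.99.99"
    reason: types are required in requests before 7.0.0
-------------------------------------------------------------

With the guard gone, each test's `- do:` section now runs unconditionally.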
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml index 01c82623ef031..b06005b7ea765 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yml @@ -94,9 +94,6 @@ --- "cluster health basic test, one index with wait for no initializing shards": - - skip: - version: " - 6.1.99" - reason: "wait_for_no_initializing_shards is introduced in 6.2.0" - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/10_basic.yml index b443e322f80f6..ef17e4a3f2473 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/10_basic.yml @@ -7,9 +7,6 @@ --- "get cluster state returns cluster_uuid at the top level": - - skip: - version: " - 6.3.99" - reason: "cluster state including cluster_uuid at the top level is new in v6.4.0 and higher" - do: cluster.state: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml index 88da42ee876be..0ab2b64f09a9d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml @@ -155,9 +155,6 @@ setup: --- "Filtering the cluster state returns cluster_uuid at the top level regardless of metric filters": - - skip: - version: " - 6.3.99" - reason: "cluster state including cluster_uuid at the top level is new in v6.4.0 and higher" # Get the current cluster_uuid - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml index bb57709e14792..d2834499ef6aa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.stats/10_basic.yml @@ -32,9 +32,6 @@ --- "get cluster stats returns cluster_uuid at the top level": - - skip: - version: " - 6.99.99" - reason: "cluster stats including cluster_uuid at the top level is new in v6.5.0 and higher" - do: cluster.stats: {} @@ -69,9 +66,6 @@ --- "get cluster stats returns discovery types": - - skip: - version: " - 6.99.99" - reason: "discovery types are added for v7.0.0" - do: cluster.stats: {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/10_with_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/10_with_id.yml index 410b31acb7138..f69e3600a43d3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/10_with_id.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/10_with_id.yml @@ -1,8 +1,6 @@ --- "Create with ID": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: create: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/15_without_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/15_without_id.yml index 5280c5bb9946d..ddfb4775458c9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/15_without_id.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/create/15_without_id.yml @@ -1,8 +1,6 @@ --- "Create without ID": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: catch: param create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yml index 47dc5b6059609..86d0d4b59e06b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yml @@ -1,8 +1,6 @@ --- "External version": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: catch: bad_request create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml index 9c048c361bd5c..af8d865031bc4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml @@ -1,8 +1,6 @@ --- "Routing": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: indices.create: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yml index dd8acd9f99f4f..e348a980de685 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yml @@ -1,8 +1,6 @@ --- "Refresh": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: indices.create: index: test_1 @@ -44,9 +42,7 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: create: index: test_1 @@ -66,9 +62,7 @@ --- "refresh=wait_for waits until changes are visible in search": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: index: index: create_60_refresh_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml index e6d2413f16788..cc9a82cbcbc9e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/70_nested.yml @@ -1,8 +1,6 @@ --- setup: - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: indices.create: index: test_1 @@ -16,9 +14,7 @@ setup: --- "Indexing a doc with No. nested objects less or equal to index.mapping.nested_objects.limit should succeed": - - skip: - version: " - 6.99.99" - reason: index.mapping.nested_objects setting has been added in 7.0.0 + - do: create: index: test_1 @@ -29,9 +25,7 @@ setup: --- "Indexing a doc with No. nested objects more than index.mapping.nested_objects.limit should fail": - - skip: - version: " - 6.99.99" - reason: index.mapping.nested_objects setting has been added in 7.0.0 + - do: catch: /The number of nested documents has exceeded the allowed limit of \[2\]. 
This limit can be set by changing the \[index.mapping.nested_objects.limit\] index level setting\./
       create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/71_nested_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/71_nested_with_types.yml
index 755aaca448b0b..1b8c549942730 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/71_nested_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/71_nested_with_types.yml
@@ -15,9 +15,7 @@ setup:
 ---
 "Indexing a doc with No. nested objects less or equal to index.mapping.nested_objects.limit should succeed":
-  - skip:
-      version: " - 6.99.99"
-      reason: index.mapping.nested_objects setting has been added in 7.0.0
+
   - do:
       create:
           index: test_1
@@ -29,9 +27,7 @@ setup:
 ---
 "Indexing a doc with No. nested objects more than index.mapping.nested_objects.limit should fail":
-  - skip:
-      version: " - 6.99.99"
-      reason: index.mapping.nested_objects setting has been added in 7.0.0
+
   - do:
       catch: /The number of nested documents has exceeded the allowed limit of \[2\]. This limit can be set by changing the \[index.mapping.nested_objects.limit\] index level setting\./
       create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml
index 842d749d7b14d..f58f1435046fc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml
@@ -1,9 +1,7 @@
 ---
 "Basic":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml
index 3fc10bc8db12d..673897af1d62e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml
@@ -1,9 +1,7 @@
 ---
 "Delete check shard header":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml
index 13356cd938c48..dba565179cded 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml
@@ -1,9 +1,7 @@
 ---
 "Delete result field":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_cas.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_cas.yml
index f3c7b0acbcccd..a739e3f53cd44 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_cas.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_cas.yml
@@ -1,9 +1,7 @@
 ---
 "Internal version":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml
index d7cc4fce0eda5..e076dbded6f0c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml
@@ -1,9 +1,7 @@
 ---
 "External version":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml
index ebe1680551c96..03adef4a75fa9 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml
@@ -1,9 +1,7 @@
 ---
 "External GTE version":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml
index 27e9350caed70..c2d6adfcd4e72 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml
@@ -1,9 +1,7 @@
 ---
 "Routing":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml
index 935e0946f100b..bac8731d47250 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml
@@ -1,9 +1,7 @@
 ---
 "Refresh":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
@@ -81,9 +79,7 @@
 ---
 "When refresh url parameter is an empty string that means \"refresh immediately\"":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
@@ -118,9 +114,7 @@
 ---
 "refresh=wait_for waits until changes are visible in search":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml
index b8f81080f3ee8..ae4e61c075ae8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml
@@ -1,9 +1,7 @@
 ---
 "Missing document with catch":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       catch: missing
@@ -14,9 +12,7 @@
 ---
 "Missing document with ignore":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       delete:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/70_mix_typeless_typeful.yml
index e0f20795e41ca..9f6f170dcd192 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/70_mix_typeless_typeful.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/70_mix_typeless_typeful.yml
@@ -1,10 +1,6 @@
 ---
 "DELETE with typeless API on an index that has types":
-  - skip:
-      version: " - 6.99.99"
-      reason: Typeless APIs were introduced in 7.0.0
-
   - do:
       indices.create: # not using include_type_name: false on purpose
           include_type_name: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml
index 1ab90e3efa83f..141e2898bb21b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/10_basic.yml
@@ -1,8 +1,6 @@
 ---
 "Basic":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       exists:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/40_routing.yml
index 8d59c8a0535f5..086286276fcd3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/40_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/40_routing.yml
@@ -1,8 +1,6 @@
 ---
 "Routing":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/60_realtime_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/60_realtime_refresh.yml
index e12a504349c4d..6aebaa78b8a0a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/60_realtime_refresh.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/60_realtime_refresh.yml
@@ -1,8 +1,6 @@
 ---
 "Realtime Refresh":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml
index 6fabdd59820cf..a042888d66d1c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/70_defaults.yml
@@ -1,8 +1,6 @@
 ---
 "Client-side default type":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml
index bfe8da8d91519..50a5804d887c7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml
@@ -1,7 +1,5 @@
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml
index ad596f980807b..ae03a58f13f6d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml
@@ -1,8 +1,6 @@
 ---
 "Source filtering":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml
index ac34d4c2495f2..61321c05548ce 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml
@@ -1,8 +1,6 @@
 ---
 "explain with query_string parameters":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml
index 36fdbaa6b6f78..af2feb4231f30 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml
@@ -1,10 +1,6 @@
 ---
 "Explain with typeless API on an index that has types":
-  - skip:
-      version: " - 6.99.99"
-      reason: Typeless APIs were introduced in 7.0.0
-
   - do:
       indices.create: # not using include_type_name: false on purpose
           include_type_name: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml
index fee9933bb8510..f65d295fa89d8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml
@@ -149,9 +149,6 @@ setup:
   - is_false: fields.geo.keyword.on_aggregatable_indices
 ---
 "Get date_nanos field caps":
-  - skip:
-      version: " - 6.99.99"
-      reason: date_nanos field mapping type has been introcued in 7.0
   - do:
       indices.create:
@@ -204,9 +201,7 @@ setup:
   - is_false: fields.object\.nested2.keyword.non_searchable_indices
 ---
 "Get object and nested field caps":
-  - skip:
-      version: " - 6.99.99"
-      reason: object and nested fields are returned since 7.0
+
   - do:
       field_caps:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/100_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/100_mix_typeless_typeful.yml
index d13229dbffbc6..11d1e6ea45c7f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/100_mix_typeless_typeful.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/100_mix_typeless_typeful.yml
@@ -1,9 +1,6 @@
 ---
 "GET with typeless API on an index that has types":
-  - skip:
-      version: " - 6.99.99"
-      reason: Typeless APIs were introduced in 7.0.0
   - do:
       indices.create: # not using include_type_name: false on purpose
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml
index 9183c70c29bce..11be0f2cd5f4e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml
@@ -1,10 +1,6 @@
 ---
 "Basic":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
-
   - do:
       index:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml
index 67065270665cf..2b32a6ab819aa 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml
@@ -1,9 +1,7 @@
 ---
 "Default values":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml
index ab27842e4516e..207665b9c8849 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml
@@ -1,9 +1,6 @@
 ---
 "Stored fields":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
   - do:
       indices.create:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml
index 9ba546d6ef942..9ce60a7e8c12a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml
@@ -1,9 +1,7 @@
 ---
 "Routing":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml
index 38130cee59810..e5447fffdaf0f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml
@@ -2,8 +2,6 @@
 "REST test with headers":
   - skip:
       features: ["headers", "yaml"]
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
   - do:
       index:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml
index ef4fa60bf1b0e..7f35bcae063df 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml
@@ -1,9 +1,7 @@
 ---
 "Realtime Refresh":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml
index f4a5ba39be3b8..952375cedd6d6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml
@@ -1,9 +1,7 @@
 ---
 "Source filtering":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml
index d7d8edfc65dcb..1e60246f97941 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml
@@ -1,9 +1,6 @@
 ---
 "Missing document with catch":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
   - do:
       catch: missing
       get:
@@ -13,9 +10,6 @@
 ---
 "Missing document with ignore":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
   - do:
       get:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml
index 9037a9113e937..cafe6f86193f3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml
@@ -1,9 +1,7 @@
 ---
 "Versions":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml
index 6f81c430c883a..7318602bb66d7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/10_basic.yml
@@ -1,9 +1,7 @@
 ---
 "Basic":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml
index 57c11a1ca10e2..0e53f92ce4eaa 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/15_default_values.yml
@@ -2,9 +2,7 @@
 "Default values":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/40_routing.yml
index 6425f70f26aad..1396f2ab8ad22 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/40_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/40_routing.yml
@@ -2,9 +2,7 @@
 "Routing":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml
index d39b07a6ce5f7..7891e7f84d92f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml
@@ -1,9 +1,7 @@
 ---
 "Realtime":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml
index 2665458cea95d..6570524630e5a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/70_source_filtering.yml
@@ -2,9 +2,7 @@
 "Source filtering":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/80_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/80_missing.yml
index b704fc2612007..d7d2975790606 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/80_missing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/80_missing.yml
@@ -3,8 +3,6 @@
   - skip:
       features: warnings
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
   - do:
       catch: missing
@@ -17,8 +15,6 @@
   - skip:
       features: warnings
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
   - do:
       get_source:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yml
index c214bf87d3997..2ec0585b0f7bc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yml
@@ -1,9 +1,7 @@
 ---
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml
index a129dcab80d9a..ffc55a8d2908a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml
@@ -1,9 +1,7 @@
 ---
 "Index with ID":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml
index f8a50415a95ef..626ff1f443d64 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml
@@ -1,9 +1,7 @@
 ---
 "Index result field":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
           index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml
index 073a4704b4ef8..eea7882319fba 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml
@@ -1,9 +1,7 @@
 ---
 "Index without ID":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml
index c33a86093acab..aea2af3860365 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml
@@ -1,9 +1,7 @@
 ---
 "Optype":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml
index 89aaa190af384..857c9d3c39c92 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml
@@ -1,9 +1,7 @@
 ---
 "External version":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml
index 82421227adb7f..30a8cf453a7d1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml
@@ -1,9 +1,7 @@
 ---
 "External GTE version":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml
index 630cf39dbe65c..f3cefc56a8e98 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml
@@ -1,9 +1,7 @@
 ---
 "Routing":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml
index e16602d7ac8b6..8a76930ac266c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml
@@ -1,9 +1,7 @@
 ---
 "Refresh":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
@@ -48,9 +46,7 @@
 ---
 "When refresh url parameter is an empty string that means \"refresh immediately\"":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
@@ -72,9 +68,7 @@
 ---
 "refresh=wait_for waits until changes are visible in search":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml
index 87d3b77aee329..324147bd4bcae 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/20_analyze_limit.yml
@@ -9,9 +9,7 @@ setup:
 ---
 "_analyze with No. generated tokens less than or equal to index.analyze.max_token_count should succeed":
-  - skip:
-      version: " - 6.99.99"
-      reason: index.analyze.max_token_count setting has been added in 7.0.0
+
   - do:
       indices.analyze:
           index: test_1
@@ -25,9 +23,7 @@ setup:
 ---
 "_analyze with No. generated tokens more than index.analyze.max_token_count should fail":
-  - skip:
-      version: " - 6.99.99"
-      reason: index.analyze.max_token_count setting has been added in 7.0.0
+
   - do:
       catch: /The number of tokens produced by calling _analyze has exceeded the allowed maximum of \[3\]. This limit can be set by changing the \[index.analyze.max_token_count\] index level setting\./
       indices.analyze:
@@ -39,9 +35,7 @@ setup:
 ---
 "_analyze with explain with No. generated tokens more than index.analyze.max_token_count should fail":
-  - skip:
-      version: " - 6.99.99"
-      reason: index.analyze.max_token_count setting has been added in 7.0.0
+
   - do:
       catch: /The number of tokens produced by calling _analyze has exceeded the allowed maximum of \[3\]. This limit can be set by changing the \[index.analyze.max_token_count\] index level setting\./
       indices.analyze:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml
index 965083421cbaf..ba227556a8f11 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml
@@ -1,9 +1,6 @@
 ---
 "Create index with mappings":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.create:
           index: test_index
@@ -20,9 +17,7 @@
 ---
 "Create index with settings":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
+
   - do:
       indices.create:
           index: test_index
@@ -39,9 +34,6 @@
 ---
 "Create index":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.create:
           index: test_index
@@ -52,9 +44,6 @@
 ---
 "Create index with wait_for_active_shards set to all":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.create:
           index: test_index
@@ -69,9 +58,6 @@
 ---
 "Create index with aliases":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.create:
           index: test_index
@@ -102,9 +88,7 @@
 ---
 "Create index with write aliases":
-  - skip:
-      version: " - 6.99.99"
-      reason: is_write_index is not implemented in ES <= 6.x
+
   - do:
       indices.create:
           index: test_index
@@ -138,9 +122,7 @@
 ---
 "Create index with explicit _doc type":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0
+
   - do:
       catch: bad_request
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml
index f5aeb53751119..e74ffd9cf1770 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml
@@ -94,9 +94,7 @@
 ---
 "Create index with write aliases":
-  - skip:
-      version: " - 6.99.99"
-      reason: is_write_index is not implemented in ES <= 6.x
+
   - do:
       indices.create:
          include_type_name: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml
index 6ba46c795da79..a4a61b0e598d1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.flush/10_basic.yml
@@ -24,9 +24,7 @@
 ---
 "Flush stats":
-  - skip:
-      version: " - 6.2.99"
-      reason: periodic flush stats is introduced in 6.3.0
+
   - do:
       indices.create:
           index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml
index 413c4bcb8d28c..3ca4417870376 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/11_basic_with_types.yml
@@ -44,9 +44,7 @@ setup:
 ---
 "Test include_type_name":
-  - skip:
-      version: " - 6.6.99"
-      reason: the include_type_name parameter is not supported before 6.7
+
   - do:
       indices.get:
@@ -66,9 +64,6 @@ setup:
 ---
 "Test include_type_name dafaults to false":
-  - skip:
-      version: " - 6.99.99"
-      reason: the include_type_name parameter default is different on 6.x and 7.0, so only test this on 7.0 clusters
   - do:
       indices.get:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml
index 9be1f7246d5f3..f19f315edb514 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml
@@ -1,8 +1,6 @@
 ---
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0
+
   - do:
       indices.create:
           index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
index 1570ded351874..77b795686db4e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
@@ -1,8 +1,6 @@
 ---
 "Return empty object if field doesn't exist, but type and index do":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0
+
   - do:
       indices.create:
           index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml
index 7db61d122e7ce..fe17f6515a007 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml
@@ -1,8 +1,6 @@
 ---
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0
+
   - do:
       indices.create:
           index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml
index 162a8d340d48a..6a520c82aad24 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml
@@ -1,10 +1,6 @@
 ---
 "GET mapping with typeless API on an index that has types":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name was introduced in 7.0.0
-
   - do:
       indices.create: # not using include_type_name: false on purpose
           include_type_name: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml
index 0e08690868d9e..6aaad4301d61b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/10_basic.yml
@@ -38,9 +38,6 @@
 ---
 "Open index with wait_for_active_shards set to all":
-  - skip:
-      version: " - 6.0.99"
-      reason: wait_for_active_shards parameter was added in 6.1.0
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml
index dd0628ea993ee..60ef4cea1fca0 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml
@@ -60,10 +60,6 @@
 ---
 "Can set is_write_index":
-  - skip:
-      version: " - 6.3.99"
-      reason: "is_write_index is only available from 6.4.0 on"
-
   - do:
       indices.create:
           index: test_index
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml
index 3850ba4150b4f..4807efd8ae9c9 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml
@@ -1,8 +1,5 @@
 ---
 "Put template":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.put_template:
@@ -28,9 +25,6 @@
 ---
 "Put multiple template":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.put_template:
@@ -56,9 +50,6 @@
 ---
 "Put template with empty mappings":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.put_template:
@@ -241,9 +232,6 @@
 ---
 "Put template with explicit _doc type":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0
   - do:
       catch: bad_request
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml
index ec9fabe02595d..50409fb983c4f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml
@@ -1,8 +1,5 @@
 ---
 "Max docs rollover conditions matches only primary shards":
-  - skip:
-      version: "- 5.4.1"
-      reason: "matching docs changed from all shards to primary shards"
   # create index with alias and replica
   - do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/41_mapping_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/41_mapping_with_types.yml
index 36389f3ce8bba..8522b87b0b3f9 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/41_mapping_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/41_mapping_with_types.yml
@@ -1,8 +1,5 @@
 ---
 "Typeless mapping":
-  - skip:
-      version: " - 6.99.99"
-      reason: include_type_name defaults to true before 7.0.0
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
index 41c851b71cc6c..54d3d38903bf3 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml
@@ -1,9 +1,7 @@
 ---
 "Shrink index via API":
   - skip:
-      version: " - 6.9.99"
-      reason: expects warnings that pre-7.0.0 will not send
-      features: [warnings, arbitrary_key]
+      features: "arbitrary_key"
   # creates an index with one document solely allocated on a particular data node
   # and shrinks it into a new index with a single shard
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml
index dec0760fc6b19..5b9c2540d499a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml
@@ -1,9 +1,7 @@
 ---
 "Shrink index ignores target template mapping":
   - skip:
-      version: " - 6.9.99"
-      reason: expects warnings that pre-7.0.0 will not send
-      features: [warnings, arbitrary_key]
+      features: "arbitrary_key"
   - do:
       nodes.info:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
index eda095ff91f98..11f2053512833 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml
@@ -1,9 +1,7 @@
 ---
 "Copy settings during shrink index":
   - skip:
-      version: " - 6.9.99"
-      reason: expects warnings that pre-7.0.0 will not send
-      features: [warnings, arbitrary_key]
+      features: "arbitrary_key"
   - do:
       nodes.info:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml
index 1a650ee88eae6..a00282e586f49 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml
@@ -39,9 +39,7 @@ setup:
 ---
 "Index - all":
-  - skip:
-      version: " - 6.3.99"
-      reason: "uuid is only available from 6.4.0 on"
+
   - do:
       indices.stats: { index: _all }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml
index 586a04f065cde..83b9c429bbdbf 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml
@@ -66,9 +66,7 @@ setup:
 ---
 "Translog last modified age stats":
-  - skip:
-      version: " - 6.2.99"
-      reason: translog last modified age stats was added in 6.3.0
+
   - do:
       index:
           index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml
index 798d699ae80a0..3b2b8103d882e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/10_basic.yml
@@ -1,8 +1,6 @@
 ---
 "Basic multi-get":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml
index a1101a903f896..5b2e941e6c3d6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yml
@@ -1,8 +1,6 @@
 ---
 "Non-existent index":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml
index 2711bed58dbb1..42f7a5039d64e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yml
@@ -1,8 +1,6 @@
 ---
 "Missing metadata":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
index 4ee569956397c..dcf83b2c9b597 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
@@ -1,8 +1,6 @@
 ---
 "Multi Get with alias that resolves to multiple indices":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       bulk:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml
index fbdc9b265a95a..3981d6d86e74d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yml
@@ -1,8 +1,6 @@
 ---
 "IDs":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml
index d03f99be39517..7d732e5ad8748 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/17_default_index.yml
@@ -1,8 +1,6 @@
 ---
 "Default index/type":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml
index 45460deb04e0b..01548972f9604 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_stored_fields.yml
@@ -1,8 +1,6 @@
 ---
 "Stored fields":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml
index df2924f274bdf..923d620bc9400 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/40_routing.yml
@@ -1,8 +1,6 @@
 ---
 "Routing":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/60_realtime_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/60_realtime_refresh.yml
index 3b1bfcdca556c..8ae390943c6b5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/60_realtime_refresh.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/60_realtime_refresh.yml
@@ -1,8 +1,6 @@
 ---
 "Realtime Refresh":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml
index 3a3086cf3616d..a70151fd2e756 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/70_source_filtering.yml
@@ -1,7 +1,5 @@
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml
index 0283455350a80..b6b7a84100dd7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated.yml
@@ -2,8 +2,6 @@
 ---
 "Deprecated parameters should fail in Multi Get query":
   - skip:
-      version: " - 6.99.99"
-      reason: _version, _routing are removed starting from 7.0, their equivalents without underscore are used instead
       features: "warnings"
   - do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated_with_types.yml
index 5033f75c79426..c65a650dae563 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/80_deprecated_with_types.yml
@@ -3,8 +3,6 @@
 "Deprecated parameters should fail in Multi Get query":
   - skip:
-      version: " - 6.99.99"
-      reason: _version, _routing are removed starting from 7.0, their equivalents without underscore are used instead
       features: "warnings"
   - do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
index 5b092c9d15e44..9f6f2e70ae46d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml
@@ -94,9 +94,6 @@ setup:
 ---
 "Search with new response format":
-  - skip:
-      version: " - 6.99.99"
-      reason: hits.total is returned as an object in 7.0.0
   - do:
       msearch:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml
index 87c3e6065bba4..4edee1390462b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/10_basic.yml
@@ -1,7 +1,5 @@
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
           index: testidx
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml
index 376192680c99b..db8e566f4b613 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/20_deprecated.yml
@@ -1,14 +1,7 @@
-setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
-
 ---
 "Deprecated camel case and _ parameters should fail in Term Vectors query":
   - skip:
-      version: " - 6.99.99"
-      reason: camel case and _ parameters (e.g. versionType, _version_type) should fail from 7.0
       features: "warnings"
   - do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml
index b0335498e22a1..3e39a33e1061d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml
@@ -3,8 +3,6 @@
 "Deprecated camel case and _ parameters should fail in Term Vectors query":
   - skip:
-      version: " - 6.99.99"
-      reason: camel case and _ parameters (e.g. versionType, _version_type) should fail from 7.0
       features: "warnings"
   - do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml
index b14b5f94ebbc2..51d8e23dbfa62 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml
@@ -1,10 +1,6 @@
 ---
 "mtermvectors without types on an index that has types":
-  - skip:
-      version: " - 6.99.99"
-      reason: Typeless APIs were introduced in 7.0.0
-
   - do:
       indices.create: # not using include_type_name: false on purpose
           include_type_name: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml
index aa6d1e9841dd7..957d1dc20fbb7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml
@@ -198,9 +198,7 @@
 ---
 "Scroll cannot used the request cache":
-  - skip:
-      version: " - 6.99.99"
-      reason: the error message has been added in v7.0.0
+
   - do:
       indices.create:
           index: test_scroll
@@ -217,9 +215,7 @@
 ---
 "Scroll with size 0":
-  - skip:
-      version: " - 6.1.99"
-      reason: the error message has been added in v6.2.0
+
   - do:
       indices.create:
           index: test_scroll
@@ -237,9 +233,6 @@
 ---
 "Scroll max_score is null":
-  - skip:
-      version: " - 6.99.99"
-      reason: max_score was set to 0 rather than null before 7.0
   - do:
       indices.create:
@@ -285,9 +278,7 @@
 ---
 "Scroll with new response format":
-  - skip:
-      version: " - 6.9.99"
-      reason: hits.total is returned as an object in 7.0.0
+
   - do:
       indices.create:
           index: test_scroll
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml
index f655b43b98949..3eed5dc74151d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/12_slices.yml
@@ -103,9 +103,6 @@ setup:
 ---
 "Sliced scroll with invalid arguments":
-  - skip:
-      version: " - 6.99.99"
-      reason: Prior versions return 500 rather than 404
   - do:
       catch: bad_request
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml
index 2ce0bab7aba95..bf50e5bb25fa0 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yml
@@ -651,10 +651,6 @@ setup:
 ---
 "Global ordinals are not loaded with the map execution hint":
-  - skip:
-      version: " - 6.99.99"
-      reason: bug fixed in 7.0
-
   - do:
       index:
         refresh: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml
index 2db498a0cacf0..df663a0d8937c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml
@@ -1,7 +1,5 @@
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: "added in 7.0.0"
+
   - do:
       indices.create:
           include_type_name: false
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml
index b1f093c138048..5b39730057f34 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/40_range.yml
@@ -210,9 +210,6 @@ setup:
 ---
 "IP Range Key Generation":
-  - skip:
-      version: " - 6.3.99"
-      reason: "Before 6.4.0, ip_range did not always generate bucket keys (see #21045)."
   - do:
       search:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml
index c32cae9ff8239..1c88758173936 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/50_filter.yml
@@ -31,9 +31,7 @@ setup:
 ---
 "Filter aggs with terms lookup and ensure it's cached":
 # Because the filter agg rewrites the terms lookup in the rewrite phase the request can be cached
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       search:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml
index ab17ddb66587e..09b34ba6ebc59 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml
@@ -207,8 +207,6 @@ setup:
 "Test typed keys parameter for date_histogram aggregation and max_bucket pipeline aggregation":
   - skip:
       features: warnings
-      version: " - 6.3.99"
-      reason: "deprecation added in 6.4.0"
   - do:
       search:
         rest_total_hits_as_int: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
index 629a6d4de34a1..7a3de2005c72d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
@@ -11,9 +11,7 @@ setup:
 ---
 "Nested inner hits":
-  - skip:
-      version: " - 6.1.99"
-      reason: "<= 6.1 nodes don't always include index or id in nested inner hits"
+
   - do:
       index:
           index: test
@@ -43,10 +41,6 @@ setup:
 ---
 "Nested doc version and seqIDs":
-  - skip:
-      version: " - 6.99.99"
-      reason: "Triggers warnings before 7.0"
-
   - do:
       index:
           index: test
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml
index 501fb1292da94..b15a48f52a43e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml
@@ -141,9 +141,7 @@ setup:
 ---
 "docvalue_fields":
-  - skip:
-      version: " - 6.9.99"
-      reason: Triggers a deprecation warning before 7.0
+
   - do:
       search:
         body:
@@ -152,9 +150,7 @@ setup:
 ---
 "multiple docvalue_fields":
-  - skip:
-      version: " - 6.9.99"
-      reason: Triggered a deprecation warning before 7.0
+
   - do:
       search:
         body:
@@ -163,9 +159,7 @@ setup:
 ---
 "docvalue_fields as url param":
-  - skip:
-      version: " - 6.99.99"
-      reason: Triggered a deprecation warning before 7.0
+
   - do:
       search:
         docvalue_fields: [ "count" ]
@@ -174,8 +168,6 @@ setup:
 ---
 "docvalue_fields with default format":
   - skip:
-      version: " - 6.99.99"
-      reason: Only triggers warnings on 7.0+
       features: warnings
   - do:
       warnings:
@@ -189,9 +181,7 @@ setup:
 ---
 "docvalue_fields with explicit format":
-  - skip:
-      version: " - 6.3.99"
-      reason: format option was added in 6.4
+
   - do:
       search:
         body:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
index 99a7300abf1dd..a40ffe94d4ab5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
@@ -248,10 +244,6 @@ setup:
 ---
 "no hits and inner_hits max_score null":
-  - skip:
-      version: " - 6.99.99"
-      reason: max_score was set to 0 rather than null before 7.0
-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -319,10 +315,6 @@ setup:
 ---
 "field collapsing, inner_hits and version":
-  - skip:
-      version: " - 6.1.0"
-      reason: "bug fixed in 6.1.1"
-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -371,9 +363,7 @@ setup:
 ---
 "field collapsing on a field alias":
-  - skip:
-      version: " - 6.3.99"
-      reason: Field aliases were introduced in 6.4.0.
+
   - do:
       indices.put_mapping:
         include_type_name: false
@@ -402,10 +392,6 @@ setup:
 ---
 "field collapsing, inner_hits and seq_no":
-  - skip:
-      version: " - 6.99.0"
-      reason: "sequence numbers introduced in 7.0.0"
-
   - do:
       search:
         rest_total_hits_as_int: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml
index b10401f48dbce..0b04385112af5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml
@@ -1,8 +1,6 @@
 ---
 "two levels fields collapsing":
-  - skip:
-      version: " - 6.99.99"
-      reason: using multiple field collapsing from 7.0 on
+
   - do:
       indices.create:
           index: addresses
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml
index d94e86bb6c565..183fefee80b96 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml
@@ -588,9 +588,7 @@ setup:
 ---
 "Test exists query on _index field":
-  - skip:
-      version: " - 6.0.99"
-      reason: exists on _index not supported prior to 6.1.0
+
   - do:
       search:
         rest_total_hits_as_int: true
@@ -604,9 +602,7 @@ setup:
 ---
 "Test exists query on _type field":
-  - skip:
-      version: " - 6.0.99"
-      reason: exists on _type not supported prior to 6.1.0
+
   - do:
       search:
         rest_total_hits_as_int: true
@@ -646,9 +642,7 @@ setup:
 ---
 "Test exists query on _source field":
-  - skip:
-      version: " - 6.0.99"
-      reason: exists on _source not supported prior to 6.1.0
+
   - do:
       catch: /query_shard_exception/
       search:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml
index af9e276558a09..6c99cce0fe46a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml
@@ -1,8 +1,6 @@
 ---
 "Terms Query with No.of terms exceeding index.max_terms_count should FAIL":
-  - skip:
-      version: " - 6.99.99"
-      reason: index.max_terms_count setting has been added in 7.0.0
+
   - do:
       indices.create:
           include_type_name: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml
index fd4621e48cad3..7ee665a47a6ec 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml
@@ -76,9 +76,7 @@ setup:
 ---
 "Search with new response format":
-  - skip:
-      version: " - 6.99.99"
-      reason: hits.total is returned as an object in 7.0.0
+
   - do:
       search:
         body:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
index 8d7a77cac8859..130e6d42504b4 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml
@@ -1,7 +1,4 @@
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: "Implemented in 7.0"
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml
index f5345ba57b1b7..1bbd7357f6869 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml
@@ -1,7 +1,5 @@
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: "Implemented in 7.0"
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml
index 17735c7fd451a..f6bb812ea8669 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yml
@@ -65,9 +65,6 @@ setup:
 ---
 "Docvalues_fields size limit":
-  - skip:
-      version: " - 6.99.99"
-      reason: "Triggers warnings before 7.0"
   - do:
       catch: /Trying to retrieve too many docvalue_fields\. Must be less than or equal to[:] \[2\] but was \[3\]\. This limit can be set by changing the \[index.max_docvalue_fields_search\] index level setting\./
       search:
@@ -99,9 +96,6 @@ setup:
 ---
 "Regexp length limit":
-  - skip:
-      version: " - 6.99.99"
-      reason: "The regex length limit was introduced in 7.0.0"
   - do:
       catch: /The length of regex \[1110\] used in the Regexp Query request has exceeded the allowed maximum of \[1000\]\. This maximum can be set by changing the \[index.max_regex_length\] index level setting\./
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/40_indices_boost.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/40_indices_boost.yml
index 37d53fe8a6d59..614fedf7fc82e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/40_indices_boost.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/40_indices_boost.yml
@@ -37,7 +37,6 @@ setup:
 ---
 "Indices boost using object":
   - skip:
-      reason: deprecation was added in 5.2.0
       features: "warnings"
   - do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml
index 5b7ac56361cc1..0a5a7260a27a8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yml
@@ -40,9 +40,6 @@ setup:
 ---
 "Create a snapshot for missing index":
-  - skip:
-      version: " - 6.0.0"
-      reason: ignore_unavailable default is false in 6.0.0
   - do:
       catch: missing
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml
index aa15ca34ff0af..e2b7279f1cd68 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.get/10_basic.yml
@@ -95,9 +95,6 @@ setup:
 ---
 "Get snapshot info contains include_global_state":
-  - skip:
-      version: " - 6.1.99"
-      reason: "include_global_state field has been added in the response in 6.2.0"
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml
index b64a51141dc6e..de28dc7f16bad 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml
@@ -292,9 +292,6 @@ setup:
 ---
 "Skip duplicates should work":
-  - skip:
-      version: " - 6.0.99"
-      reason: skip_duplicates was added in 6.1
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml
index e2c7ccfb421e3..6f3bae7f8e46f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml
@@ -277,9 +277,6 @@ setup:
 ---
 "Skip duplicates with contexts should work":
-  - skip:
-      version: " - 6.0.99"
-      reason: skip_duplicates was added in 6.1
   - do:
       index:
@@ -333,9 +330,6 @@ setup:
 ---
 "Indexing and Querying without contexts is forbidden":
-  - skip:
-      version: " - 6.99.99"
-      reason: this feature was removed in 7.0
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
index 1742134af2b75..68cbc68552a91 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml
@@ -23,9 +23,7 @@
 ---
 "tasks_list headers":
   - skip:
-      version: " - 6.99.99"
       features: headers
-      reason: task headers has been added in 7.0.0
   - do:
       headers: { "X-Opaque-Id": "That is me" }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/10_basic.yml
index 62ec86118e5bb..23ec7eabfe128 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/10_basic.yml
@@ -1,8 +1,4 @@
 setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
-
   - do:
       indices.create:
           index: testidx
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml
index 5f43e8a247923..eb7d335c28c60 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yml
@@ -1,9 +1,3 @@
-setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
-
 ---
 "Term vector API should return 'found: false' for docs between index and refresh":
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml
index 0cb6dfc06904b..8bfbee483690c 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/30_realtime.yml
@@ -1,9 +1,3 @@
-setup:
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
-
 ---
 "Realtime Term Vectors":
   - do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml
index 4382442dee4dd..5801025654cf6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml
@@ -1,10 +1,6 @@
 ---
 "Term vectors with typeless API on an index that has types":
-  - skip:
-      version: " - 6.99.99"
-      reason: Typeless APIs were introduced in 7.0.0
-
   - do:
       indices.create: # not using include_type_name: false on purpose
           include_type_name: true
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml
index 3a35ad46f9161..13788af7e35c5 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml
@@ -1,9 +1,7 @@
 ---
 "Partial document":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml
index 41dba3551e64c..5782c8286fb60 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml
@@ -1,9 +1,7 @@
 ---
 "Update check shard header":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       indices.create:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml
index 657c036291bd6..6c69bc2aa993b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml
@@ -1,9 +1,7 @@
 ---
 "Update result field":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       update:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml
index a849eecc66629..39e2273d5cafb 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml
@@ -1,9 +1,7 @@
 ---
 "Doc upsert":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       update:
           index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml
index 5bdc3ecea75fc..0d695cb754056 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml
@@ -1,9 +1,7 @@
 ---
 "Doc as upsert":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in requests before 7.0.0
+
   - do:
       update:
          index: test_1
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml
index 6f43d381e0537..1a91beebbb15d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml
@@ -1,9 +1,7 @@
 ---
 "Routing":
-  - skip:
-      version: " - 6.99.99"
-      reason: types are required in
requests before 7.0.0 + - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml index 3a74f75f4f11d..77888fcbb2710 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml @@ -1,9 +1,7 @@ --- "Refresh": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: indices.create: @@ -52,9 +50,7 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: index: @@ -85,9 +81,7 @@ --- "refresh=wait_for waits until changes are visible in search": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml index 9e6d5a4671955..c8eeba967a6f7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml @@ -1,9 +1,7 @@ --- "Source filtering": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 + - do: update: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/90_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/90_mix_typeless_typeful.yml index 0ca25e8598c24..fff183daedc37 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/90_mix_typeless_typeful.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/90_mix_typeless_typeful.yml @@ -1,10 +1,6 @@ --- "Update with typeless API on an index that has types": - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - do: indices.create: # not using include_type_name: false on purpose include_type_name: true @@ -42,10 +38,6 @@ --- "Update call that introduces new field mappings": - - skip: - version: " - 6.99.99" - reason: Typeless APIs were introduced in 7.0.0 - - do: indices.create: # not using include_type_name: false on purpose include_type_name: true diff --git a/server/src/main/java/org/elasticsearch/Build.java b/server/src/main/java/org/elasticsearch/Build.java index dcf827091f54e..be37c56837d70 100644 --- a/server/src/main/java/org/elasticsearch/Build.java +++ b/server/src/main/java/org/elasticsearch/Build.java @@ -241,7 +241,13 @@ public static void writeBuild(Build build, StreamOutput out) throws IOException out.writeString(build.flavor().displayName()); } if (out.getVersion().onOrAfter(Version.V_6_3_0)) { - out.writeString(build.type().displayName()); + final Type buildType; + if (out.getVersion().before(Version.V_6_7_0) && build.type() == Type.DOCKER) { + buildType = Type.TAR; + } else { + buildType = build.type(); + } + out.writeString(buildType.displayName()); } out.writeString(build.shortHash()); out.writeString(build.date()); diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index ab3971c32838b..19d3a66a87d32 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ 
b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -23,6 +23,7 @@ import java.io.InputStream; import java.nio.file.FileAlreadyExistsException; import java.nio.file.NoSuchFileException; +import java.util.List; import java.util.Map; /** @@ -96,8 +97,9 @@ public interface BlobContainer { * @throws IOException if the input stream could not be read, or the target blob could not be written to. */ void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException; + /** - * Deletes a blob with giving name, if the blob exists. If the blob does not exist, + * Deletes the blob with the given name, if the blob exists. If the blob does not exist, * this method throws a NoSuchFileException. * * @param blobName @@ -107,6 +109,33 @@ public interface BlobContainer { */ void deleteBlob(String blobName) throws IOException; + /** + * Deletes the blobs with the given names. Unlike {@link #deleteBlob(String)}, this method does not throw an exception + * when one or more of the given blobs do not exist; such blobs are simply ignored. + * + * @param blobNames The names of the blobs to delete. + * @throws IOException if a subset of the blobs exists but could not be deleted. + */ + default void deleteBlobsIgnoringIfNotExists(List<String> blobNames) throws IOException { + IOException ioe = null; + for (String blobName : blobNames) { + try { + deleteBlob(blobName); + } catch (NoSuchFileException e) { + // ignored + } catch (IOException e) { + if (ioe == null) { + ioe = e; + } else { + ioe.addSuppressed(e); + } + } + } + if (ioe != null) { + throw ioe; + } + } + /** * Deletes a blob with giving name, ignoring if the blob does not exist. * diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 2379b4f00c2bf..027d360153f2b 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -456,7 +456,7 @@ public class DateFormatters { .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(NANO_OF_SECOND, 3, 9, true) + .appendFraction(NANO_OF_SECOND, 1, 9, true) .appendZoneOrOffsetId() .toFormatter(Locale.ROOT), new DateTimeFormatterBuilder() @@ -465,7 +465,7 @@ public class DateFormatters { .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(NANO_OF_SECOND, 3, 9, true) + .appendFraction(NANO_OF_SECOND, 1, 9, true) .append(TIME_ZONE_FORMATTER_NO_COLON) .toFormatter(Locale.ROOT) ); @@ -517,12 +517,20 @@ public class DateFormatters { private static final DateFormatter STRICT_HOUR_MINUTE_SECOND = new JavaDateFormatter("strict_hour_minute_second", STRICT_HOUR_MINUTE_SECOND_FORMATTER); + private static final DateTimeFormatter STRICT_DATE_PRINTER = new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .appendFraction(NANO_OF_SECOND, 3, 9, true) + .appendOffset("+HH:MM", "Z") + .toFormatter(Locale.ROOT); + private static final DateTimeFormatter STRICT_DATE_FORMATTER = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) .appendLiteral('T')
.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) .optionalStart() - .appendFraction(NANO_OF_SECOND, 3, 9, true) + .appendFraction(NANO_OF_SECOND, 1, 9, true) .optionalEnd() .toFormatter(Locale.ROOT); @@ -530,8 +538,7 @@ public class DateFormatters { * Returns a formatter that combines a full date and time, separated by a 'T' * (yyyy-MM-dd'T'HH:mm:ss.SSSZZ). */ - private static final DateFormatter STRICT_DATE_TIME = new JavaDateFormatter("strict_date_time", - new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).appendOffset("+HH:MM", "Z").toFormatter(Locale.ROOT), + private static final DateFormatter STRICT_DATE_TIME = new JavaDateFormatter("strict_date_time", STRICT_DATE_PRINTER, new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) ); @@ -653,7 +660,7 @@ public class DateFormatters { private static final DateFormatter STRICT_HOUR_MINUTE = new JavaDateFormatter("strict_hour_minute", DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT)); - private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_PRINTER = new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) .appendLiteral('-') .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) @@ -666,12 +673,25 @@ public class DateFormatters { .optionalEnd() .toFormatter(Locale.ROOT); + private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) + .appendLiteral('T') + .appendPattern("HH:mm") + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(NANO_OF_SECOND, 1, 9, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + /* * Returns a formatter for a full ordinal date and time, using a four * digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ). */ private static final DateFormatter STRICT_ORDINAL_DATE_TIME = new JavaDateFormatter("strict_ordinal_date_time", - new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_PRINTER) .appendOffset("+HH:MM", "Z").toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) .appendZoneOrOffsetId().toFormatter(Locale.ROOT), @@ -1198,7 +1218,7 @@ public class DateFormatters { * digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ). 
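 * The split into a separate printer and parsers keeps output stable while relaxing input:
 * the *_PRINTER builders retain appendFraction(NANO_OF_SECOND, 3, 9, true), so printed
 * values always carry at least three fractional digits, whereas the parsing builders now
 * use appendFraction(NANO_OF_SECOND, 1, 9, true) and accept one to nine digits.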
*/ private static final DateFormatter ORDINAL_DATE_TIME = new JavaDateFormatter("ordinal_date_time", - new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_PRINTER) .appendOffset("+HH:MM", "Z").toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE) .appendZoneOrOffsetId().toFormatter(Locale.ROOT), diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index bd6fd908d49ab..30361fa70ee6b 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -320,7 +320,14 @@ long writeManifestAndCleanup(String reason, Manifest manifest) throws WriteState finished = true; return generation; } catch (WriteStateException e) { - rollback(); + // If the Manifest write results in a dirty WriteStateException, it is not safe to remove the + // new metadata files: the Manifest may actually have been written to disk, and if its deletion + // then fails it will still reference these new metadata files. + // In the future, we might add a more fine-grained check to determine whether, after a dirty + // WriteStateException, the Manifest deletion actually failed. + if (e.isDirty() == false) { + rollback(); + } throw e; } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index 6aad80c4421e4..afd1d9e368480 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -112,15 +112,15 @@ protected void resize(int newSize) { } public long getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[<field>].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } @Override public Long get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[<field>].size()==0 to check if a document is missing a field!"); + } return values[index]; } @@ -151,15 +151,15 @@ public Dates(SortedNumericDocValues in, boolean isNanos) { * in. */ public JodaCompatibleZonedDateTime getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[<field>].size()==0 to check if a document is missing a field!"); - } return get(0); } @Override public JodaCompatibleZonedDateTime get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[<field>].size()==0 to check if a document is missing a field!"); + } if (index >= count) { throw new IndexOutOfBoundsException( "attempted to fetch the [" + index + "] date when there are only [" @@ -240,15 +240,15 @@ public SortedNumericDoubleValues getInternalValues() { } public double getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[<field>].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } @Override public Double get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[<field>].size()==0 to check if a document is missing a field!"); + } return values[index]; } @@ -297,11 +297,7 @@ protected void resize(int newSize) { } public GeoPoint getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[<field>].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } public double getLat() { @@ -330,6 +326,10 @@ public double getLon() { @Override public GeoPoint get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[<field>].size()==0 to check if a document is missing a field!"); + } final GeoPoint point = values[index]; return new GeoPoint(point.lat(), point.lon()); } @@ -409,15 +409,15 @@ protected void resize(int newSize) { } public boolean getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[<field>].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } @Override public Boolean get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[<field>].size()==0 to check if a document is missing a field!"); + } return values[index]; } @@ -492,14 +492,14 @@ public Strings(SortedBinaryDocValues in) { @Override public String get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[<field>].size()==0 to check if a document is missing a field!"); + } return values[index].get().utf8ToString(); } public String getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[<field>].size()==0 to check if a document is missing a field!"); - } return get(0); } } @@ -512,6 +512,10 @@ public BytesRefs(SortedBinaryDocValues in) { @Override public BytesRef get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[<field>].size()==0 to check if a document is missing a field!"); + } /** * We need to make a copy here because {@link BinaryScriptDocValues} might reuse the * returned value and the same instance might be used to @@ -521,10 +525,6 @@ public BytesRef get(int index) { } public BytesRef getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[<field>].size()==0 to check if a document is missing a field!"); - } return get(0); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 5790248ead807..8a3203ad8e7e0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -813,18 +813,33 @@ protected String contentType() { return CONTENT_TYPE; } + @Override + public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) { + TextFieldMapper mapper = (TextFieldMapper) super.updateFieldType(fullNameToFieldType); + if (mapper.prefixFieldMapper != null) { + mapper.prefixFieldMapper = (PrefixFieldMapper) mapper.prefixFieldMapper.updateFieldType(fullNameToFieldType); + } + if (mapper.phraseFieldMapper != null) { + mapper.phraseFieldMapper = (PhraseFieldMapper) mapper.phraseFieldMapper.updateFieldType(fullNameToFieldType); + } + return mapper; + } + @Override protected void doMerge(Mapper mergeWith) { super.doMerge(mergeWith); TextFieldMapper mw = (TextFieldMapper) mergeWith; + if (this.prefixFieldMapper != null && mw.prefixFieldMapper != null) { this.prefixFieldMapper = (PrefixFieldMapper) this.prefixFieldMapper.merge(mw.prefixFieldMapper); - } - else if (this.prefixFieldMapper != null || mw.prefixFieldMapper != null) { + } else if (this.prefixFieldMapper != null || mw.prefixFieldMapper != null) { throw new IllegalArgumentException("mapper [" + name() + "] has different index_prefix settings, current [" + this.prefixFieldMapper + "], merged [" + mw.prefixFieldMapper + "]"); } - else if (this.fieldType().indexPhrases != mw.fieldType().indexPhrases) { + + if (this.phraseFieldMapper != null && mw.phraseFieldMapper != null) { + this.phraseFieldMapper = (PhraseFieldMapper) this.phraseFieldMapper.merge(mw.phraseFieldMapper); + } else if (this.fieldType().indexPhrases != mw.fieldType().indexPhrases) { throw new IllegalArgumentException("mapper [" + name() + "] has different index_phrases settings, current [" + this.fieldType().indexPhrases + "], merged [" + mw.fieldType().indexPhrases + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 97d1939c1b292..ff1922a231dc8 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -947,9 +947,7 @@ public FlushStats flushStats() { public DocsStats docStats() { readAllowed(); - DocsStats docsStats = getEngine().docStats(); - markSearcherAccessed(); - return docsStats; + return getEngine().docStats(); } /** @@ -1028,11 +1026,7 @@ public TranslogStats translogStats() { public CompletionStats completionStats(String...
fields) { readAllowed(); try { - CompletionStats stats = getEngine().completionStats(fields); - // we don't wait for a pending refreshes here since it's a stats call instead we mark it as accessed only which will cause - // the next scheduled refresh to go through and refresh the stats as well - markSearcherAccessed(); - return stats; + return getEngine().completionStats(fields); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index e8380a77962d4..b8eda3303377d 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -19,6 +19,7 @@ package org.elasticsearch.monitor.jvm; +import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SuppressForbidden; @@ -171,7 +172,11 @@ private static boolean usingBundledJdk() { */ final String javaHome = System.getProperty("java.home"); final String userDir = System.getProperty("user.dir"); - return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk").toAbsolutePath()); + if (Constants.MAC_OS_X) { + return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk/Contents/Home").toAbsolutePath()); + } else { + return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk").toAbsolutePath()); + } } public static JvmInfo jvmInfo() { diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index a73626351d875..11ae491c8e7cb 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -101,7 +101,6 @@ import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; -import java.nio.file.DirectoryNotEmptyException; import java.nio.file.FileAlreadyExistsException; import java.nio.file.NoSuchFileException; import java.util.ArrayList; @@ -135,11 +134,11 @@ * |- Ac1342-B_x/ - data for index "foo" which was assigned the unique id of Ac1342-B_x in the repository * | |- meta-20131010.dat - JSON Serialized IndexMetaData for index "foo" * | |- 0/ - data for shard "0" of index "foo" - * | | |- __1 \ - * | | |- __2 | - * | | |- __3 |- files from different segments see snapshot-* for their mappings to real segment files - * | | |- __4 | - * | | |- __5 / + * | | |- __1 \ (files with numeric names were created by older ES versions) + * | | |- __2 | + * | | |- __VPO5oDMVT5y4Akv8T_AO_A |- files from different segments see snap-* for their mappings to real segment files + * | | |- __1gbJy18wS_2kv1qI7FgKuQ | + * | | |- __R8JvZAHlSMyMXyZc2SS8Zg / * | | ..... 
* | | |- snap-20131010.dat - JSON serialized BlobStoreIndexShardSnapshot for snapshot "20131010" * | | |- snap-20131011.dat - JSON serialized BlobStoreIndexShardSnapshot for snapshot "20131011" @@ -162,8 +161,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp protected final RepositoryMetaData metadata; - protected final NamedXContentRegistry namedXContentRegistry; - private static final int BUFFER_SIZE = 4096; private static final String SNAPSHOT_PREFIX = "snap-"; @@ -213,11 +210,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp private final CounterMetric restoreRateLimitingTimeInNanos = new CounterMetric(); - private ChecksumBlobStoreFormat<MetaData> globalMetaDataFormat; + private final ChecksumBlobStoreFormat<MetaData> globalMetaDataFormat; - private ChecksumBlobStoreFormat<IndexMetaData> indexMetaDataFormat; + private final ChecksumBlobStoreFormat<IndexMetaData> indexMetaDataFormat; - private ChecksumBlobStoreFormat<SnapshotInfo> snapshotFormat; + private final ChecksumBlobStoreFormat<SnapshotInfo> snapshotFormat; private final boolean readOnly; @@ -240,17 +237,21 @@ protected BlobStoreRepository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry) { this.settings = settings; this.metadata = metadata; - this.namedXContentRegistry = namedXContentRegistry; this.compress = COMPRESS_SETTING.get(metadata.settings()); snapshotRateLimiter = getRateLimiter(metadata.settings(), "max_snapshot_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB)); restoreRateLimiter = getRateLimiter(metadata.settings(), "max_restore_bytes_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB)); readOnly = metadata.settings().getAsBoolean("readonly", false); - indexShardSnapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, BlobStoreIndexShardSnapshot::fromXContent, namedXContentRegistry, compress); indexShardSnapshotsFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_INDEX_CODEC, SNAPSHOT_INDEX_NAME_FORMAT, BlobStoreIndexShardSnapshots::fromXContent, namedXContentRegistry, compress); + globalMetaDataFormat = new ChecksumBlobStoreFormat<>(METADATA_CODEC, METADATA_NAME_FORMAT, + MetaData::fromXContent, namedXContentRegistry, compress); + indexMetaDataFormat = new ChecksumBlobStoreFormat<>(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT, + IndexMetaData::fromXContent, namedXContentRegistry, compress); + snapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, + SnapshotInfo::fromXContentInternal, namedXContentRegistry, compress); } @Override @@ -259,12 +260,6 @@ protected void doStart() { if (chunkSize != null && chunkSize.getBytes() <= 0) { throw new IllegalArgumentException("the chunk size cannot be negative: [" + chunkSize + "]"); } - globalMetaDataFormat = new ChecksumBlobStoreFormat<>(METADATA_CODEC, METADATA_NAME_FORMAT, - MetaData::fromXContent, namedXContentRegistry, compress); - indexMetaDataFormat = new ChecksumBlobStoreFormat<>(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT, - IndexMetaData::fromXContent, namedXContentRegistry, compress); - snapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, - SnapshotInfo::fromXContentInternal, namedXContentRegistry, compress); } @Override @@ -470,22 +465,16 @@ public void deleteSnapshot(SnapshotId snapshotId, long repositoryStateId) { final Collection<IndexId> indicesToCleanUp = Sets.newHashSet(repositoryData.getIndices().values()); indicesToCleanUp.removeAll(updatedRepositoryData.getIndices().values()); final BlobContainer indicesBlobContainer =
blobStore().blobContainer(basePath().add("indices")); - for (final IndexId indexId : indicesToCleanUp) { try { - indicesBlobContainer.deleteBlobIgnoringIfNotExists(indexId.getId()); - } catch (DirectoryNotEmptyException dnee) { - // if the directory isn't empty for some reason, it will fail to clean up; - // we'll ignore that and accept that cleanup didn't fully succeed. - // since we are using UUIDs for path names, this won't be an issue for - // snapshotting indices of the same name - logger.warn(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, " + - "but failed to clean up its index folder due to the directory not being empty.", metadata.name(), indexId), dnee); + indicesBlobContainer.deleteBlobsIgnoringIfNotExists( + indicesToCleanUp.stream().map(IndexId::getId).collect(Collectors.toList())); } catch (IOException ioe) { // a different IOException occurred while trying to delete - will just log the issue for now - logger.warn(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, " + - "but failed to clean up its index folder.", metadata.name(), indexId), ioe); + logger.warn(() -> + new ParameterizedMessage( + "[{}] indices {} are no longer part of any snapshots in the repository, " + + "but failed to clean up their index folders.", metadata.name(), indicesToCleanUp), ioe); - } } catch (IOException | ResourceNotFoundException ex) { throw new RepositoryException(metadata.name(), "failed to delete snapshot [" + snapshotId + "]", ex); } @@ -1022,16 +1011,14 @@ protected void finalize(final List<SnapshotFiles> snapshots, try { // Delete temporary index files first, as we might otherwise fail in the next step creating the new index file if an earlier // attempt to write an index file with this generation failed mid-way after creating the temporary file.
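+                // The per-blob deletion loops below are collapsed into batched calls: each pass first
+                // collects the matching blob names, then hands them to
+                // BlobContainer#deleteBlobsIgnoringIfNotExists, so an already-missing blob no longer
+                // aborts the cleanup and any other IOExceptions are combined via addSuppressed.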
- for (final String blobName : blobs.keySet()) { - if (FsBlobContainer.isTempBlobName(blobName)) { - try { - blobContainer.deleteBlobIgnoringIfNotExists(blobName); - } catch (IOException e) { - logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete index blob [{}] during finalization", - snapshotId, shardId, blobName), e); - throw e; - } - } + final List<String> blobNames = + blobs.keySet().stream().filter(FsBlobContainer::isTempBlobName).collect(Collectors.toList()); + try { + blobContainer.deleteBlobsIgnoringIfNotExists(blobNames); + } catch (IOException e) { + logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete index blobs {} during finalization", + snapshotId, shardId, blobNames), e); + throw e; } // If we deleted all snapshots, we don't need to create a new index file @@ -1040,28 +1027,26 @@ protected void finalize(final List<SnapshotFiles> snapshots, } // Delete old index files - for (final String blobName : blobs.keySet()) { - if (blobName.startsWith(SNAPSHOT_INDEX_PREFIX)) { - try { - blobContainer.deleteBlobIgnoringIfNotExists(blobName); - } catch (IOException e) { - logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete index blob [{}] during finalization", - snapshotId, shardId, blobName), e); - throw e; - } - } + final List<String> indexBlobs = + blobs.keySet().stream().filter(blob -> blob.startsWith(SNAPSHOT_INDEX_PREFIX)).collect(Collectors.toList()); + try { + blobContainer.deleteBlobsIgnoringIfNotExists(indexBlobs); + } catch (IOException e) { + logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete index blobs {} during finalization", + snapshotId, shardId, indexBlobs), e); + throw e; } // Delete all blobs that don't exist in a snapshot - for (final String blobName : blobs.keySet()) { - if (blobName.startsWith(DATA_BLOB_PREFIX) && (updatedSnapshots.findNameFile(canonicalName(blobName)) == null)) { - try { - blobContainer.deleteBlobIgnoringIfNotExists(blobName); - } catch (IOException e) { - logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete data blob [{}] during finalization", - snapshotId, shardId, blobName), e); - } - } + final List<String> orphanedBlobs = blobs.keySet().stream() + .filter(blobName -> + blobName.startsWith(DATA_BLOB_PREFIX) && updatedSnapshots.findNameFile(canonicalName(blobName)) == null) + .collect(Collectors.toList()); + try { + blobContainer.deleteBlobsIgnoringIfNotExists(orphanedBlobs); + } catch (IOException e) { + logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete data blobs {} during finalization", + snapshotId, shardId, orphanedBlobs), e); } } catch (IOException e) { String message = "Failed to finalize " + reason + " with shard index [" + currentIndexGen + "]"; @@ -1069,41 +1054,6 @@ protected void finalize(final List<SnapshotFiles> snapshots, } } - /** - * Generates blob name - * - * @param generation the blob number - * @return the blob name - */ - protected String fileNameFromGeneration(long generation) { - return DATA_BLOB_PREFIX + Long.toString(generation, Character.MAX_RADIX); - } - - /** - * Finds the next available blob number - * - * @param blobs list of blobs in the repository - * @return next available blob number - */ - protected long findLatestFileNameGeneration(Map<String, BlobMetaData> blobs) { - long generation = -1; - for (String name : blobs.keySet()) { - if (!name.startsWith(DATA_BLOB_PREFIX)) { - continue; - } - name = canonicalName(name); - try { - long currentGen = Long.parseLong(name.substring(DATA_BLOB_PREFIX.length()), Character.MAX_RADIX); - if (currentGen > generation) { - generation =
currentGen; - } - } catch (NumberFormatException e) { - logger.warn("file [{}] does not conform to the '{}' schema", name, DATA_BLOB_PREFIX); - } - } - return generation; - } - /** * Loads all available snapshots in the repository * @@ -1196,7 +1146,6 @@ public void snapshot(final IndexCommit snapshotIndexCommit) { throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e); } - long generation = findLatestFileNameGeneration(blobs); Tuple<BlobStoreIndexShardSnapshots, Integer> tuple = buildBlobStoreIndexShardSnapshots(blobs); BlobStoreIndexShardSnapshots snapshots = tuple.v1(); int fileListGeneration = tuple.v2(); @@ -1264,7 +1213,7 @@ public void snapshot(final IndexCommit snapshotIndexCommit) { indexIncrementalSize += md.length(); // create a new FileInfo BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = - new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), md, chunkSize()); + new BlobStoreIndexShardSnapshot.FileInfo(DATA_BLOB_PREFIX + UUIDs.randomBase64UUID(), md, chunkSize()); indexCommitPointFiles.add(snapshotFileInfo); filesToSnapshot.add(snapshotFileInfo); } else { diff --git a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index e3e986c1eca9a..d2a27cc2bb3ef 100644 --- a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -63,7 +63,7 @@ public class FsRepository extends BlobStoreRepository { new ByteSizeValue(Long.MAX_VALUE), new ByteSizeValue(5), new ByteSizeValue(Long.MAX_VALUE), Property.NodeScope); private final Environment environment; - private ByteSizeValue chunkSize; + private final ByteSizeValue chunkSize; private final BlobPath basePath; diff --git a/server/src/test/java/org/elasticsearch/BuildTests.java b/server/src/test/java/org/elasticsearch/BuildTests.java index 1f99a1f4542b5..12af1d31841cf 100644 --- a/server/src/test/java/org/elasticsearch/BuildTests.java +++ b/server/src/test/java/org/elasticsearch/BuildTests.java @@ -20,15 +20,23 @@ package org.elasticsearch; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Arrays; +import java.util.List; +import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import static org.hamcrest.Matchers.equalTo; + public class BuildTests extends ESTestCase { /** Asking for the jar metadata should not throw exception in tests, no matter how configured */ @@ -115,4 +123,103 @@ public void testEqualsAndHashCode() { ); assertNotEquals(build, differentVersion); } + + private static class WriteableBuild implements Writeable { + private final Build build; + + WriteableBuild(StreamInput in) throws IOException { + build = Build.readBuild(in); + } + + WriteableBuild(Build build) { + this.build = build; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + Build.writeBuild(build, out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WriteableBuild that = (WriteableBuild) o; + return build.equals(that.build); + } + + @Override +
public int hashCode() { + return Objects.hash(build); + } + } + + private static String randomStringExcept(final String s) { + return randomAlphaOfLength(13 - s.length()); + } + + public void testSerialization() { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(new WriteableBuild(new Build( + randomFrom(Build.Flavor.values()), randomFrom(Build.Type.values()), + randomAlphaOfLength(6), randomAlphaOfLength(6), randomBoolean(), randomAlphaOfLength(6))), + b -> copyWriteable(b, writableRegistry(), WriteableBuild::new, Version.CURRENT), + b -> { + switch (randomIntBetween(1, 6)) { + case 1: + return new WriteableBuild(new Build( + randomValueOtherThan(b.build.flavor(), () -> randomFrom(Build.Flavor.values())), b.build.type(), + b.build.shortHash(), b.build.date(), b.build.isSnapshot(), b.build.getQualifiedVersion())); + case 2: + return new WriteableBuild(new Build(b.build.flavor(), + randomValueOtherThan(b.build.type(), () -> randomFrom(Build.Type.values())), + b.build.shortHash(), b.build.date(), b.build.isSnapshot(), b.build.getQualifiedVersion())); + case 3: + return new WriteableBuild(new Build(b.build.flavor(), b.build.type(), + randomStringExcept(b.build.shortHash()), b.build.date(), b.build.isSnapshot(), b.build.getQualifiedVersion())); + case 4: + return new WriteableBuild(new Build(b.build.flavor(), b.build.type(), + b.build.shortHash(), randomStringExcept(b.build.date()), b.build.isSnapshot(), b.build.getQualifiedVersion())); + case 5: + return new WriteableBuild(new Build(b.build.flavor(), b.build.type(), + b.build.shortHash(), b.build.date(), b.build.isSnapshot() == false, b.build.getQualifiedVersion())); + case 6: + return new WriteableBuild(new Build(b.build.flavor(), b.build.type(), + b.build.shortHash(), b.build.date(), b.build.isSnapshot(), randomStringExcept(b.build.getQualifiedVersion()))); + } + throw new AssertionError(); + }); + } + + public void testSerializationBWC() throws IOException { + final WriteableBuild dockerBuild = new WriteableBuild(new Build(randomFrom(Build.Flavor.values()), Build.Type.DOCKER, + randomAlphaOfLength(6), randomAlphaOfLength(6), randomBoolean(), randomAlphaOfLength(6))); + + final List<Version> versions = Version.getDeclaredVersions(Version.class); + final Version pre63Version = randomFrom(versions.stream().filter(v -> v.before(Version.V_6_3_0)).collect(Collectors.toList())); + final Version post63Pre67Version = randomFrom(versions.stream() + .filter(v -> v.onOrAfter(Version.V_6_3_0) && v.before(Version.V_6_7_0)).collect(Collectors.toList())); + final Version post67Pre70Version = randomFrom(versions.stream() + .filter(v -> v.onOrAfter(Version.V_6_7_0) && v.before(Version.V_7_0_0)).collect(Collectors.toList())); + final Version post70Version = randomFrom(versions.stream().filter(v -> v.onOrAfter(Version.V_7_0_0)).collect(Collectors.toList())); + + final WriteableBuild pre63 = copyWriteable(dockerBuild, writableRegistry(), WriteableBuild::new, pre63Version); + final WriteableBuild post63pre67 = copyWriteable(dockerBuild, writableRegistry(), WriteableBuild::new, post63Pre67Version); + final WriteableBuild post67pre70 = copyWriteable(dockerBuild, writableRegistry(), WriteableBuild::new, post67Pre70Version); + final WriteableBuild post70 = copyWriteable(dockerBuild, writableRegistry(), WriteableBuild::new, post70Version); + + assertThat(pre63.build.flavor(), equalTo(Build.Flavor.OSS)); + assertThat(post63pre67.build.flavor(), equalTo(dockerBuild.build.flavor())); + assertThat(post67pre70.build.flavor(), equalTo(dockerBuild.build.flavor()));
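+        // These assertions pin down the wire-format downgrades in Build.writeBuild: nodes before
+        // 6.3 receive neither flavor nor type and decode them as OSS/UNKNOWN, nodes on [6.3, 6.7)
+        // predate the DOCKER type and are sent TAR in its place, and nodes on 6.7+ see DOCKER as-is.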
+ assertThat(post70.build.flavor(), equalTo(dockerBuild.build.flavor())); + + assertThat(pre63.build.type(), equalTo(Build.Type.UNKNOWN)); + assertThat(post63pre67.build.type(), equalTo(Build.Type.TAR)); + assertThat(post67pre70.build.type(), equalTo(dockerBuild.build.type())); + assertThat(post70.build.type(), equalTo(dockerBuild.build.type())); + + assertThat(pre63.build.getQualifiedVersion(), equalTo(pre63Version.toString())); + assertThat(post63pre67.build.getQualifiedVersion(), equalTo(post63Pre67Version.toString())); + assertThat(post67pre70.build.getQualifiedVersion(), equalTo(post67Pre70Version.toString())); + assertThat(post70.build.getQualifiedVersion(), equalTo(dockerBuild.build.getQualifiedVersion())); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java index d7e9767d7a14d..81fd5ec7fc83d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -114,4 +115,23 @@ public void testMappingVersionUnchanged() throws Exception { assertThat(result.resultingState.metaData().index("test").getMappingVersion(), equalTo(previousVersion)); } + public void testMappingUpdateAccepts_docAsType() throws Exception { + final IndexService indexService = createIndex("test", + client().admin().indices().prepareCreate("test").addMapping("my_type")); + final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class); + final ClusterService clusterService = getInstanceFromNode(ClusterService.class); + final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest() + .type(MapperService.SINGLE_MAPPING_NAME); + request.indices(new Index[] {indexService.index()}); + request.source("{ \"properties\": { \"foo\": { \"type\": \"keyword\" } }}"); + final ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result = + mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request)); + assertThat(result.executionResults.size(), equalTo(1)); + assertTrue(result.executionResults.values().iterator().next().isSuccess()); + MappingMetaData mappingMetaData = result.resultingState.metaData().index("test").mapping(); + assertEquals("my_type", mappingMetaData.type()); + assertEquals(Collections.singletonMap("properties", + Collections.singletonMap("foo", + Collections.singletonMap("type", "keyword"))), mappingMetaData.sourceAsMap()); + } } diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java index 5798b5f799203..c3a541fe87ec2 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java @@ -100,6 +100,7 @@ public void testDuellingFormatsValidParsing() { assertSameDate("20181126T121212+01:00", "basic_date_time_no_millis");
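+        // The ".1" cases added below are enabled by relaxing appendFraction(NANO_OF_SECOND, 3, 9, true)
+        // to a one-digit minimum in DateFormatters, so fractional seconds of one to nine digits now parse.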
assertSameDate("20181126T121212+0100", "basic_date_time_no_millis"); assertSameDate("2018363", "basic_ordinal_date"); + assertSameDate("2018363T121212.1Z", "basic_ordinal_date_time"); assertSameDate("2018363T121212.123Z", "basic_ordinal_date_time"); assertSameDate("2018363T121212.123456789Z", "basic_ordinal_date_time"); assertSameDate("2018363T121212.123+0100", "basic_ordinal_date_time"); @@ -107,15 +108,19 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018363T121212Z", "basic_ordinal_date_time_no_millis"); assertSameDate("2018363T121212+0100", "basic_ordinal_date_time_no_millis"); assertSameDate("2018363T121212+01:00", "basic_ordinal_date_time_no_millis"); + assertSameDate("121212.1Z", "basic_time"); assertSameDate("121212.123Z", "basic_time"); assertSameDate("121212.123456789Z", "basic_time"); + assertSameDate("121212.1+0100", "basic_time"); assertSameDate("121212.123+0100", "basic_time"); assertSameDate("121212.123+01:00", "basic_time"); assertSameDate("121212Z", "basic_time_no_millis"); assertSameDate("121212+0100", "basic_time_no_millis"); assertSameDate("121212+01:00", "basic_time_no_millis"); + assertSameDate("T121212.1Z", "basic_t_time"); assertSameDate("T121212.123Z", "basic_t_time"); assertSameDate("T121212.123456789Z", "basic_t_time"); + assertSameDate("T121212.1+0100", "basic_t_time"); assertSameDate("T121212.123+0100", "basic_t_time"); assertSameDate("T121212.123+01:00", "basic_t_time"); assertSameDate("T121212Z", "basic_t_time_no_millis"); @@ -124,6 +129,7 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018W313", "basic_week_date"); assertSameDate("1W313", "basic_week_date"); assertSameDate("18W313", "basic_week_date"); + assertSameDate("2018W313T121212.1Z", "basic_week_date_time"); assertSameDate("2018W313T121212.123Z", "basic_week_date_time"); assertSameDate("2018W313T121212.123456789Z", "basic_week_date_time"); assertSameDate("2018W313T121212.123+0100", "basic_week_date_time"); @@ -145,8 +151,10 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-12-31T12:12:12", "date_hour_minute_second"); assertSameDate("2018-12-31T12:12:1", "date_hour_minute_second"); + assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction"); assertSameDate("2018-12-31T12:12:12.123", "date_hour_minute_second_fraction"); assertSameDate("2018-12-31T12:12:12.123456789", "date_hour_minute_second_fraction"); + assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_millis"); assertSameDate("2018-12-31T12:12:12.123", "date_hour_minute_second_millis"); assertParseException("2018-12-31T12:12:12.123456789", "date_hour_minute_second_millis"); assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_millis"); @@ -157,11 +165,14 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-05-30T20", "date_optional_time"); assertSameDate("2018-05-30T20:21", "date_optional_time"); assertSameDate("2018-05-30T20:21:23", "date_optional_time"); + assertSameDate("2018-05-30T20:21:23.1", "date_optional_time"); assertSameDate("2018-05-30T20:21:23.123", "date_optional_time"); assertSameDate("2018-05-30T20:21:23.123456789", "date_optional_time"); assertSameDate("2018-05-30T20:21:23.123Z", "date_optional_time"); assertSameDate("2018-05-30T20:21:23.123456789Z", "date_optional_time"); + assertSameDate("2018-05-30T20:21:23.1+0100", "date_optional_time"); assertSameDate("2018-05-30T20:21:23.123+0100", "date_optional_time"); + assertSameDate("2018-05-30T20:21:23.1+01:00", "date_optional_time"); 
assertSameDate("2018-05-30T20:21:23.123+01:00", "date_optional_time"); assertSameDate("2018-12-1", "date_optional_time"); assertSameDate("2018-12-31T10:15:30", "date_optional_time"); @@ -169,17 +180,23 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-12-31T10:5:30", "date_optional_time"); assertSameDate("2018-12-31T1:15:30", "date_optional_time"); + assertSameDate("2018-12-31T10:15:30.1Z", "date_time"); assertSameDate("2018-12-31T10:15:30.123Z", "date_time"); assertSameDate("2018-12-31T10:15:30.123456789Z", "date_time"); + assertSameDate("2018-12-31T10:15:30.1+0100", "date_time"); assertSameDate("2018-12-31T10:15:30.123+0100", "date_time"); assertSameDate("2018-12-31T10:15:30.123+01:00", "date_time"); + assertSameDate("2018-12-31T10:15:30.1+01:00", "date_time"); assertSameDate("2018-12-31T10:15:30.11Z", "date_time"); assertSameDate("2018-12-31T10:15:30.11+0100", "date_time"); assertSameDate("2018-12-31T10:15:30.11+01:00", "date_time"); + assertSameDate("2018-12-31T10:15:3.1Z", "date_time"); assertSameDate("2018-12-31T10:15:3.123Z", "date_time"); assertSameDate("2018-12-31T10:15:3.123456789Z", "date_time"); + assertSameDate("2018-12-31T10:15:3.1+0100", "date_time"); assertSameDate("2018-12-31T10:15:3.123+0100", "date_time"); assertSameDate("2018-12-31T10:15:3.123+01:00", "date_time"); + assertSameDate("2018-12-31T10:15:3.1+01:00", "date_time"); assertSameDate("2018-12-31T10:15:30Z", "date_time_no_millis"); assertSameDate("2018-12-31T10:15:30+0100", "date_time_no_millis"); @@ -218,10 +235,12 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-128", "ordinal_date"); assertSameDate("2018-1", "ordinal_date"); + assertSameDate("2018-128T10:15:30.1Z", "ordinal_date_time"); assertSameDate("2018-128T10:15:30.123Z", "ordinal_date_time"); assertSameDate("2018-128T10:15:30.123456789Z", "ordinal_date_time"); assertSameDate("2018-128T10:15:30.123+0100", "ordinal_date_time"); assertSameDate("2018-128T10:15:30.123+01:00", "ordinal_date_time"); + assertSameDate("2018-1T10:15:30.1Z", "ordinal_date_time"); assertSameDate("2018-1T10:15:30.123Z", "ordinal_date_time"); assertSameDate("2018-1T10:15:30.123456789Z", "ordinal_date_time"); assertSameDate("2018-1T10:15:30.123+0100", "ordinal_date_time"); @@ -234,16 +253,20 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-1T10:15:30+0100", "ordinal_date_time_no_millis"); assertSameDate("2018-1T10:15:30+01:00", "ordinal_date_time_no_millis"); + assertSameDate("10:15:30.1Z", "time"); assertSameDate("10:15:30.123Z", "time"); assertSameDate("10:15:30.123456789Z", "time"); assertSameDate("10:15:30.123+0100", "time"); assertSameDate("10:15:30.123+01:00", "time"); + assertSameDate("1:15:30.1Z", "time"); assertSameDate("1:15:30.123Z", "time"); assertSameDate("1:15:30.123+0100", "time"); assertSameDate("1:15:30.123+01:00", "time"); + assertSameDate("10:1:30.1Z", "time"); assertSameDate("10:1:30.123Z", "time"); assertSameDate("10:1:30.123+0100", "time"); assertSameDate("10:1:30.123+01:00", "time"); + assertSameDate("10:15:3.1Z", "time"); assertSameDate("10:15:3.123Z", "time"); assertSameDate("10:15:3.123+0100", "time"); assertSameDate("10:15:3.123+01:00", "time"); @@ -267,10 +290,13 @@ public void testDuellingFormatsValidParsing() { assertSameDate("10:15:3+01:00", "time_no_millis"); assertParseException("10:15:3", "time_no_millis"); + assertSameDate("T10:15:30.1Z", "t_time"); assertSameDate("T10:15:30.123Z", "t_time"); assertSameDate("T10:15:30.123456789Z", "t_time"); + assertSameDate("T10:15:30.1+0100", 
"t_time"); assertSameDate("T10:15:30.123+0100", "t_time"); assertSameDate("T10:15:30.123+01:00", "t_time"); + assertSameDate("T10:15:30.1+01:00", "t_time"); assertSameDate("T1:15:30.123Z", "t_time"); assertSameDate("T1:15:30.123+0100", "t_time"); assertSameDate("T1:15:30.123+01:00", "t_time"); @@ -305,12 +331,18 @@ public void testDuellingFormatsValidParsing() { "Cannot parse \"2012-W1-8\": Value 8 for dayOfWeek must be in the range [1,7]"); assertJavaTimeParseException("2012-W1-8", "week_date"); + assertSameDate("2012-W48-6T10:15:30.1Z", "week_date_time"); assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time"); assertSameDate("2012-W48-6T10:15:30.123456789Z", "week_date_time"); + assertSameDate("2012-W48-6T10:15:30.1+0100", "week_date_time"); assertSameDate("2012-W48-6T10:15:30.123+0100", "week_date_time"); + assertSameDate("2012-W48-6T10:15:30.1+01:00", "week_date_time"); assertSameDate("2012-W48-6T10:15:30.123+01:00", "week_date_time"); + assertSameDate("2012-W1-6T10:15:30.1Z", "week_date_time"); assertSameDate("2012-W1-6T10:15:30.123Z", "week_date_time"); + assertSameDate("2012-W1-6T10:15:30.1+0100", "week_date_time"); assertSameDate("2012-W1-6T10:15:30.123+0100", "week_date_time"); + assertSameDate("2012-W1-6T10:15:30.1+01:00", "week_date_time"); assertSameDate("2012-W1-6T10:15:30.123+01:00", "week_date_time"); assertSameDate("2012-W48-6T10:15:30Z", "week_date_time_no_millis"); @@ -357,9 +389,12 @@ public void testExceptionWhenCompositeParsingFails(){ public void testDuelingStrictParsing() { assertSameDate("2018W313", "strict_basic_week_date"); assertParseException("18W313", "strict_basic_week_date"); + assertSameDate("2018W313T121212.1Z", "strict_basic_week_date_time"); assertSameDate("2018W313T121212.123Z", "strict_basic_week_date_time"); assertSameDate("2018W313T121212.123456789Z", "strict_basic_week_date_time"); + assertSameDate("2018W313T121212.1+0100", "strict_basic_week_date_time"); assertSameDate("2018W313T121212.123+0100", "strict_basic_week_date_time"); + assertSameDate("2018W313T121212.1+01:00", "strict_basic_week_date_time"); assertSameDate("2018W313T121212.123+01:00", "strict_basic_week_date_time"); assertParseException("2018W313T12128.123Z", "strict_basic_week_date_time"); assertParseException("2018W313T12128.123456789Z", "strict_basic_week_date_time"); @@ -387,6 +422,7 @@ public void testDuelingStrictParsing() { assertParseException("2018-12-31T8:3", "strict_date_hour_minute"); assertSameDate("2018-12-31T12:12:12", "strict_date_hour_minute_second"); assertParseException("2018-12-31T12:12:1", "strict_date_hour_minute_second"); + assertSameDate("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_fraction"); assertSameDate("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_fraction"); assertSameDate("2018-12-31T12:12:12.123456789", "strict_date_hour_minute_second_fraction"); assertSameDate("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_millis"); @@ -407,9 +443,12 @@ public void testDuelingStrictParsing() { assertParseException("2018-12-31T10:5:30", "strict_date_optional_time"); assertParseException("2018-12-31T9:15:30", "strict_date_optional_time"); assertSameDate("2015-01-04T00:00Z", "strict_date_optional_time"); + assertSameDate("2018-12-31T10:15:30.1Z", "strict_date_time"); assertSameDate("2018-12-31T10:15:30.123Z", "strict_date_time"); assertSameDate("2018-12-31T10:15:30.123456789Z", "strict_date_time"); + assertSameDate("2018-12-31T10:15:30.1+0100", "strict_date_time"); assertSameDate("2018-12-31T10:15:30.123+0100", "strict_date_time"); 
+ assertSameDate("2018-12-31T10:15:30.1+01:00", "strict_date_time"); assertSameDate("2018-12-31T10:15:30.123+01:00", "strict_date_time"); assertSameDate("2018-12-31T10:15:30.11Z", "strict_date_time"); assertSameDate("2018-12-31T10:15:30.11+0100", "strict_date_time"); @@ -442,9 +481,12 @@ public void testDuelingStrictParsing() { assertSameDate("2018-128", "strict_ordinal_date"); assertParseException("2018-1", "strict_ordinal_date"); + assertSameDate("2018-128T10:15:30.1Z", "strict_ordinal_date_time"); assertSameDate("2018-128T10:15:30.123Z", "strict_ordinal_date_time"); assertSameDate("2018-128T10:15:30.123456789Z", "strict_ordinal_date_time"); + assertSameDate("2018-128T10:15:30.1+0100", "strict_ordinal_date_time"); assertSameDate("2018-128T10:15:30.123+0100", "strict_ordinal_date_time"); + assertSameDate("2018-128T10:15:30.1+01:00", "strict_ordinal_date_time"); assertSameDate("2018-128T10:15:30.123+01:00", "strict_ordinal_date_time"); assertParseException("2018-1T10:15:30.123Z", "strict_ordinal_date_time"); @@ -453,6 +495,7 @@ public void testDuelingStrictParsing() { assertSameDate("2018-128T10:15:30+01:00", "strict_ordinal_date_time_no_millis"); assertParseException("2018-1T10:15:30Z", "strict_ordinal_date_time_no_millis"); + assertSameDate("10:15:30.1Z", "strict_time"); assertSameDate("10:15:30.123Z", "strict_time"); assertSameDate("10:15:30.123456789Z", "strict_time"); assertSameDate("10:15:30.123+0100", "strict_time"); @@ -474,9 +517,12 @@ public void testDuelingStrictParsing() { assertParseException("10:15:3Z", "strict_time_no_millis"); assertParseException("10:15:3", "strict_time_no_millis"); + assertSameDate("T10:15:30.1Z", "strict_t_time"); assertSameDate("T10:15:30.123Z", "strict_t_time"); assertSameDate("T10:15:30.123456789Z", "strict_t_time"); + assertSameDate("T10:15:30.1+0100", "strict_t_time"); assertSameDate("T10:15:30.123+0100", "strict_t_time"); + assertSameDate("T10:15:30.1+01:00", "strict_t_time"); assertSameDate("T10:15:30.123+01:00", "strict_t_time"); assertParseException("T1:15:30.123Z", "strict_t_time"); assertParseException("T10:1:30.123Z", "strict_t_time"); @@ -505,9 +551,12 @@ public void testDuelingStrictParsing() { "Cannot parse \"2012-W01-8\": Value 8 for dayOfWeek must be in the range [1,7]"); assertJavaTimeParseException("2012-W01-8", "strict_week_date"); + assertSameDate("2012-W48-6T10:15:30.1Z", "strict_week_date_time"); assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time"); assertSameDate("2012-W48-6T10:15:30.123456789Z", "strict_week_date_time"); + assertSameDate("2012-W48-6T10:15:30.1+0100", "strict_week_date_time"); assertSameDate("2012-W48-6T10:15:30.123+0100", "strict_week_date_time"); + assertSameDate("2012-W48-6T10:15:30.1+01:00", "strict_week_date_time"); assertSameDate("2012-W48-6T10:15:30.123+01:00", "strict_week_date_time"); assertParseException("2012-W1-6T10:15:30.123Z", "strict_week_date_time"); diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 1f4e0bafe4a3b..22259b919ec6f 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -374,7 +374,6 @@ private static MetaData randomMetaDataForTx() { return builder.build(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/39077") public void testAtomicityWithFailures() throws IOException { try (NodeEnvironment env = 
diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java
index 1f4e0bafe4a3b..22259b919ec6f 100644
--- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java
+++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java
@@ -374,7 +374,6 @@ private static MetaData randomMetaDataForTx() {
         return builder.build();
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/39077")
     public void testAtomicityWithFailures() throws IOException {
         try (NodeEnvironment env = newNodeEnvironment()) {
             MetaStateServiceWithFailures metaStateService =
diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java
index 72d890edc795d..9d118f3fc5c82 100644
--- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java
+++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.fielddata;
 
+import org.elasticsearch.index.fielddata.ScriptDocValues.GeoPoints;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.test.ESTestCase;
@@ -28,31 +29,30 @@
 
 public class ScriptDocValuesGeoPointsTests extends ESTestCase {
 
-    private static MultiGeoPointValues wrap(final GeoPoint... points) {
+    private static MultiGeoPointValues wrap(GeoPoint[][] points) {
         return new MultiGeoPointValues() {
-            int docID = -1;
+            GeoPoint[] current;
             int i;
 
             @Override
             public GeoPoint nextValue() {
-                if (docID != 0) {
-                    fail();
-                }
-                return points[i++];
+                return current[i++];
             }
 
             @Override
             public boolean advanceExact(int docId) {
-                docID = docId;
-                return points.length > 0;
+                if (docId < points.length) {
+                    current = points[docId];
+                } else {
+                    current = new GeoPoint[0];
+                }
+                i = 0;
+                return current.length > 0;
             }
 
             @Override
             public int docValueCount() {
-                if (docID != 0) {
-                    return 0;
-                }
-                return points.length;
+                return current.length;
             }
         };
     }
@@ -71,7 +71,8 @@ public void testGeoGetLatLon() throws IOException {
         final double lon1 = randomLon();
         final double lon2 = randomLon();
 
-        final MultiGeoPointValues values = wrap(new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2));
+        GeoPoint[][] points = {{new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2)}};
+        final MultiGeoPointValues values = wrap(points);
         final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values);
 
         script.setNextDocId(1);
@@ -88,11 +89,13 @@ public void testGeoGetLatLon() throws IOException {
     public void testGeoDistance() throws IOException {
         final double lat = randomLat();
         final double lon = randomLon();
-        final MultiGeoPointValues values = wrap(new GeoPoint(lat, lon));
+        GeoPoint[][] points = {{new GeoPoint(lat, lon)}};
+        final MultiGeoPointValues values = wrap(points);
         final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values);
         script.setNextDocId(0);
 
-        final ScriptDocValues.GeoPoints emptyScript = new ScriptDocValues.GeoPoints(wrap());
+        GeoPoint[][] points2 = {new GeoPoint[0]};
+        final ScriptDocValues.GeoPoints emptyScript = new ScriptDocValues.GeoPoints(wrap(points2));
         emptyScript.setNextDocId(0);
 
         final double otherLat = randomLat();
@@ -110,4 +113,32 @@ public void testGeoDistance() throws IOException {
             script.planeDistanceWithDefault(otherLat, otherLon, 42) / 1000d, 0.01);
         assertEquals(42, emptyScript.planeDistanceWithDefault(otherLat, otherLon, 42), 0);
     }
+
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40684")
+    public void testMissingValues() throws IOException {
+        GeoPoint[][] points = new GeoPoint[between(3, 10)][];
+        for (int d = 0; d < points.length; d++) {
+            points[d] = new GeoPoint[randomBoolean() ? 0 : between(1, 10)];
+        }
+        final ScriptDocValues.GeoPoints geoPoints = new GeoPoints(wrap(points));
+        for (int d = 0; d < points.length; d++) {
+            geoPoints.setNextDocId(d);
+            if (points[d].length > 0) {
+                assertEquals(points[d][0], geoPoints.getValue());
+            } else {
+                Exception e = expectThrows(IllegalStateException.class, () -> geoPoints.getValue());
+                assertEquals("A document doesn't have a value for a field! " +
+                    "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage());
+                e = expectThrows(IllegalStateException.class, () -> geoPoints.get(0));
+                assertEquals("A document doesn't have a value for a field! " +
+                    "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage());
+            }
+            assertEquals(points[d].length, geoPoints.size());
+            for (int i = 0; i < points[d].length; i++) {
+                assertEquals(points[d][i], geoPoints.get(i));
+            }
+        }
+    }
+
 }
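For readers unfamiliar with the doc-values iteration contract that the rewritten wrap(...) stub implements: a consumer first positions the iterator with advanceExact(docId), then reads exactly docValueCount() values via nextValue(), in order. A sketch only, reusable inside the test class above (the method itself is invented for illustration):

----------------------------------------------
// Hypothetical consumer of the wrap(...) stub defined above.
public void testIterationContractSketch() throws IOException {
    GeoPoint[][] points = {{new GeoPoint(12.0, 34.0)}, {}};
    MultiGeoPointValues values = wrap(points);
    assertTrue(values.advanceExact(0));          // doc 0 has one value
    for (int i = 0; i < values.docValueCount(); i++) {
        GeoPoint point = values.nextValue();     // values come back in order
        assertNotNull(point);
    }
    assertFalse(values.advanceExact(1));         // doc 1 has no values
}
----------------------------------------------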
" + + "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); } assertEquals(values[d].length, longs.size()); for (int i = 0; i < values[d].length; i++) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 7314ecb1de7c2..449d17a5b9bda 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -78,6 +78,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.Is.is; public class TextFieldMapperTests extends ESSingleNodeTestCase { @@ -1084,4 +1085,95 @@ public void testFastPhrasePrefixes() throws IOException { assertThat(q, equalTo(mpq)); } } + + public void testSimpleMerge() throws IOException { + MapperService mapperService = createIndex("test_mapping_merge").mapperService(); + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", true) + .endObject() + .endObject() + .endObject().endObject()); + DocumentMapper mapper = mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapper.mappers().getMapper("a_field"), instanceOf(TextFieldMapper.class)); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", true) + .endObject() + .endObject() + .endObject().endObject()); + DocumentMapper mapper = mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapper.mappers().getMapper("a_field"), instanceOf(TextFieldMapper.class)); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes") + .field("min_chars", "3") + .endObject() + .field("index_phrases", true) + .endObject() + .endObject() + .endObject().endObject()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)); + assertThat(e.getMessage(), containsString("different [index_prefixes]")); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", false) + .endObject() + .endObject() + .endObject().endObject()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)); + assertThat(e.getMessage(), containsString("different [index_phrases]")); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + 
.startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", true) + .endObject() + .startObject("b_field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject().endObject()); + DocumentMapper mapper = mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapper.mappers().getMapper("a_field"), instanceOf(TextFieldMapper.class)); + assertThat(mapper.mappers().getMapper("b_field"), instanceOf(KeywordFieldMapper.class)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 67106b04f8deb..da474e8d770b2 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2767,7 +2767,7 @@ public void testCompletionStatsMarksSearcherAccessed() throws Exception { }); long prevAccessTime = shard.getLastSearcherAccess(); indexShard.completionStats(); - assertThat("searcher was not marked as accessed", shard.getLastSearcherAccess(), greaterThan(prevAccessTime)); + assertThat("searcher was marked as accessed", shard.getLastSearcherAccess(), equalTo(prevAccessTime)); } finally { closeShards(indexShard); } @@ -2797,7 +2797,7 @@ public void testDocStats() throws Exception { }); long prevAccessTime = shard.getLastSearcherAccess(); final DocsStats docsStats = indexShard.docStats(); - assertThat("searcher was not marked as accessed", shard.getLastSearcherAccess(), greaterThan(prevAccessTime)); + assertThat("searcher was marked as accessed", shard.getLastSearcherAccess(), equalTo(prevAccessTime)); assertThat(docsStats.getCount(), equalTo(numDocs)); try (Engine.Searcher searcher = indexShard.acquireSearcher("test")) { assertTrue(searcher.reader().numDocs() <= docsStats.getCount()); diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 1f934aa00b3b8..28170c827b3e9 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -20,9 +20,8 @@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; - -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; @@ -30,6 +29,7 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.network.IfConfig; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.secure_sm.SecureSM; import org.junit.Assert; @@ -79,8 +79,10 @@ public class BootstrapForTesting { } // just like bootstrap, initialize natives, then SM + final boolean memoryLock = + BootstrapSettings.MEMORY_LOCK_SETTING.get(Settings.EMPTY); // use the default bootstrap.memory_lock setting final boolean systemCallFilter = Booleans.parseBoolean(System.getProperty("tests.system_call_filter", "true")); - Bootstrap.initializeNatives(javaTmpDir, true, systemCallFilter, true); + Bootstrap.initializeNatives(javaTmpDir, memoryLock, 
systemCallFilter, true); // initialize probes Bootstrap.initializeProbes(); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java index 3e4e639dd01e3..21071f7cb5005 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java @@ -33,6 +33,7 @@ import java.nio.file.NoSuchFileException; import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; @@ -136,6 +137,23 @@ public void testDeleteBlob() throws IOException { } } + public void testDeleteBlobs() throws IOException { + try (BlobStore store = newBlobStore()) { + final List blobNames = Arrays.asList("foobar", "barfoo"); + final BlobContainer container = store.blobContainer(new BlobPath()); + container.deleteBlobsIgnoringIfNotExists(blobNames); // does not raise when blobs don't exist + byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); + final BytesArray bytesArray = new BytesArray(data); + for (String blobName : blobNames) { + writeBlob(container, blobName, bytesArray, randomBoolean()); + } + assertEquals(container.listBlobs().size(), 2); + container.deleteBlobsIgnoringIfNotExists(blobNames); + assertTrue(container.listBlobs().isEmpty()); + container.deleteBlobsIgnoringIfNotExists(blobNames); // does not raise when blobs don't exist + } + } + public void testDeleteBlobIgnoringIfNotExists() throws IOException { try (BlobStore store = newBlobStore()) { BlobPath blobPath = new BlobPath(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java index 73e639cec5e1d..154bd206ade66 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -33,7 +33,7 @@ public final class DataFrameField { public static final String REST_BASE_PATH = "/_data_frame/"; public static final String REST_BASE_PATH_TRANSFORMS = REST_BASE_PATH + "transforms/"; public static final String REST_BASE_PATH_TRANSFORMS_BY_ID = REST_BASE_PATH_TRANSFORMS + "{id}/"; - public static final String DATA_FRAME_TRANSFORM_AUDIT_ID_FIELD = "transform_id"; + public static final String TRANSFORM_ID = "transform_id"; // note: this is used to match tasks public static final String PERSISTENT_TASK_DESCRIPTION_PREFIX = "data_frame_"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java index 7fd633a1e9d0e..dbe789ca3aebf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -29,12 +29,16 @@ public class DataFrameMessages { public static final String DATA_FRAME_CONFIG_INVALID = "Data frame transform configuration is invalid [{0}]"; public static final String REST_DATA_FRAME_FAILED_TO_SERIALIZE_TRANSFORM = "Failed to serialise transform [{0}]"; + public static final 
String DATA_FRAME_FAILED_TO_PERSIST_STATS = "Failed to persist data frame statistics for transform [{0}]"; + public static final String DATA_FRAME_UNKNOWN_TRANSFORM_STATS = "Statistics for transform [{0}] could not be found"; public static final String FAILED_TO_CREATE_DESTINATION_INDEX = "Could not create destination index [{0}] for transform [{1}]"; public static final String FAILED_TO_LOAD_TRANSFORM_CONFIGURATION = "Failed to load data frame transform configuration for transform [{0}]"; public static final String FAILED_TO_PARSE_TRANSFORM_CONFIGURATION = "Failed to parse transform configuration for data frame transform [{0}]"; + public static final String FAILED_TO_PARSE_TRANSFORM_STATISTICS_CONFIGURATION = + "Failed to parse transform statistics for data frame transform [{0}]"; public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_NO_TRANSFORM = "Data frame transform configuration must specify exactly 1 function"; public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY = diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java index f0e92aa36db2f..d91f7a1a06964 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java @@ -6,9 +6,9 @@ package org.elasticsearch.xpack.core.dataframe.action; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; @@ -20,14 +20,18 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; + public class GetDataFrameTransformsStatsAction extends Action { public static final GetDataFrameTransformsStatsAction INSTANCE = new GetDataFrameTransformsStatsAction(); @@ -43,6 +47,11 @@ public Response newResponse() { public static class Request extends BaseTasksRequest { private String id; + private PageParams pageParams = PageParams.defaultParams(); + + public static final int MAX_SIZE_RETURN = 1000; + // used internally to expand the queried id expression + private List expandedIds = Collections.emptyList(); public Request(String id) { if (Strings.isNullOrEmpty(id) || id.equals("*")) { @@ -55,36 +64,58 @@ public Request(String id) { public Request(StreamInput in) throws IOException { super(in); id = in.readString(); + expandedIds = in.readList(StreamInput::readString); + pageParams = in.readOptionalWriteable(PageParams::new); } @Override public boolean match(Task task) { - // If we are retrieving all the 
transforms, the task description does not contain the id - if (id.equals(MetaData.ALL)) { - return task.getDescription().startsWith(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX); - } - // Otherwise find the task by ID - return task.getDescription().equals(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); + // Only get tasks that we have expanded to + return expandedIds.stream() + .anyMatch(transformId -> task.getDescription().equals(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + transformId)); } public String getId() { return id; } + public List getExpandedIds() { + return expandedIds; + } + + public void setExpandedIds(List expandedIds) { + this.expandedIds = Collections.unmodifiableList(new ArrayList<>(expandedIds)); + } + + public final void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + + public final PageParams getPageParams() { + return pageParams; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); + out.writeStringCollection(expandedIds); + out.writeOptionalWriteable(pageParams); } @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException exception = null; + if (getPageParams() != null && getPageParams().getSize() > MAX_SIZE_RETURN) { + exception = addValidationError("Param [" + PageParams.SIZE.getPreferredName() + + "] has a max acceptable value of [" + MAX_SIZE_RETURN + "]", exception); + } + return exception; } @Override public int hashCode() { - return Objects.hash(id); + return Objects.hash(id, pageParams); } @Override @@ -96,7 +127,7 @@ public boolean equals(Object obj) { return false; } Request other = (Request) obj; - return Objects.equals(id, other.id); + return Objects.equals(id, other.id) && Objects.equals(pageParams, other.pageParams); } } @@ -109,7 +140,7 @@ public Response(List transformsStateAndStats) { } public Response(List transformsStateAndStats, List taskFailures, - List nodeFailures) { + List nodeFailures) { super(taskFailures, nodeFailures); this.transformsStateAndStats = transformsStateAndStats; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java index 7dab9be6ab3cc..e6ac6cbc57b15 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java @@ -11,17 +11,17 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage; import org.elasticsearch.xpack.core.common.notifications.Level; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; import java.util.Date; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.xpack.core.dataframe.DataFrameField.DATA_FRAME_TRANSFORM_AUDIT_ID_FIELD; public class DataFrameAuditMessage extends AbstractAuditMessage { - private static final ParseField TRANSFORM_ID = new ParseField(DATA_FRAME_TRANSFORM_AUDIT_ID_FIELD); + private static final ParseField TRANSFORM_ID = new 
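The new paging support caps the page size at MAX_SIZE_RETURN, and a request above the cap now fails validation instead of being silently accepted. A minimal sketch of the intended use; the PageParams(from, size) constructor is an assumption based on how it is used elsewhere in x-pack:

----------------------------------------------
// Sketch only, assuming PageParams(from, size).
GetDataFrameTransformsStatsAction.Request request =
        new GetDataFrameTransformsStatsAction.Request("my_transform");
request.setPageParams(new PageParams(0, 100));   // fine: size <= 1000
assert request.validate() == null;

request.setPageParams(new PageParams(0, 5000));  // size > MAX_SIZE_RETURN
assert request.validate() != null;               // validation error returned
----------------------------------------------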
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java
index 7dab9be6ab3cc..e6ac6cbc57b15 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/notifications/DataFrameAuditMessage.java
@@ -11,17 +11,17 @@
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage;
 import org.elasticsearch.xpack.core.common.notifications.Level;
+import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils;
 
 import java.util.Date;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
-import static org.elasticsearch.xpack.core.dataframe.DataFrameField.DATA_FRAME_TRANSFORM_AUDIT_ID_FIELD;
 
 public class DataFrameAuditMessage extends AbstractAuditMessage {
 
-    private static final ParseField TRANSFORM_ID = new ParseField(DATA_FRAME_TRANSFORM_AUDIT_ID_FIELD);
+    private static final ParseField TRANSFORM_ID = new ParseField(DataFrameField.TRANSFORM_ID);
     public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
         "data_frame_audit_message", true,
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java
index 9bb654b31d702..05cb6dea33ed9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java
@@ -6,35 +6,43 @@
 
 package org.elasticsearch.xpack.core.dataframe.transforms;
 
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 import org.elasticsearch.xpack.core.indexing.IndexerJobStats;
 
 import java.io.IOException;
+import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 public class DataFrameIndexerTransformStats extends IndexerJobStats {
 
-    private static final String NAME = "data_frame_indexer_transform_stats";
-    private static ParseField NUM_PAGES = new ParseField("pages_processed");
-    private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed");
-    private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed");
-    private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count");
-    private static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms");
-    private static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms");
-    private static ParseField INDEX_TOTAL = new ParseField("index_total");
-    private static ParseField SEARCH_TOTAL = new ParseField("search_total");
-    private static ParseField SEARCH_FAILURES = new ParseField("search_failures");
-    private static ParseField INDEX_FAILURES = new ParseField("index_failures");
+    private static final String DEFAULT_TRANSFORM_ID = "_all";
+
+    public static final String NAME = "data_frame_indexer_transform_stats";
+    public static ParseField NUM_PAGES = new ParseField("pages_processed");
+    public static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed");
+    public static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed");
+    public static ParseField NUM_INVOCATIONS = new ParseField("trigger_count");
+    public static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms");
+    public static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms");
+    public static ParseField INDEX_TOTAL = new ParseField("index_total");
+    public static ParseField SEARCH_TOTAL = new ParseField("search_total");
+    public static ParseField SEARCH_FAILURES = new ParseField("search_failures");
+    public static ParseField INDEX_FAILURES = new ParseField("index_failures");
 
     public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
-        NAME, args -> new DataFrameIndexerTransformStats((long) args[0], (long) args[1], (long) args[2],
-            (long) args[3], (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9]));
+        NAME, args -> new DataFrameIndexerTransformStats((String) args[0], (long) args[1], (long) args[2], (long) args[3],
+            (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9], (long) args[10]));
 
     static {
+        PARSER.declareString(optionalConstructorArg(), DataFrameField.ID);
         PARSER.declareLong(constructorArg(), NUM_PAGES);
         PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS);
         PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS);
@@ -45,20 +53,72 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
         PARSER.declareLong(constructorArg(), SEARCH_TOTAL);
         PARSER.declareLong(constructorArg(), INDEX_FAILURES);
         PARSER.declareLong(constructorArg(), SEARCH_FAILURES);
+        PARSER.declareString(optionalConstructorArg(), DataFrameField.INDEX_DOC_TYPE);
+    }
+
+    private final String transformId;
+
+    /**
+     * Certain situations call for a default transform ID, e.g. when merging many different transforms for statistics gathering.
+     *
+     * The returned stats object cannot be stored in the index as the transformId does not refer to a real transform configuration
+     *
+     * @return new DataFrameIndexerTransformStats with empty stats and a default transform ID
+     */
+    public static DataFrameIndexerTransformStats withDefaultTransformId() {
+        return new DataFrameIndexerTransformStats(DEFAULT_TRANSFORM_ID);
     }
 
-    public DataFrameIndexerTransformStats() {
+    public static DataFrameIndexerTransformStats withDefaultTransformId(long numPages, long numInputDocuments, long numOutputDocuments,
+                                                                        long numInvocations, long indexTime, long searchTime,
+                                                                        long indexTotal, long searchTotal, long indexFailures,
+                                                                        long searchFailures) {
+        return new DataFrameIndexerTransformStats(DEFAULT_TRANSFORM_ID, numPages, numInputDocuments,
+            numOutputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal,
+            indexFailures, searchFailures);
+    }
+
+    public DataFrameIndexerTransformStats(String transformId) {
         super();
+        this.transformId = Objects.requireNonNull(transformId, "parameter transformId must not be null");
+    }
+
+    public DataFrameIndexerTransformStats(String transformId, long numPages, long numInputDocuments, long numOutputDocuments,
+                                          long numInvocations, long indexTime, long searchTime, long indexTotal, long searchTotal,
+                                          long indexFailures, long searchFailures) {
+        super(numPages, numInputDocuments, numOutputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal,
+            indexFailures, searchFailures);
+        this.transformId = Objects.requireNonNull(transformId, "parameter transformId must not be null");
     }
 
-    public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations,
-                                          long indexTime, long searchTime, long indexTotal, long searchTotal, long indexFailures, long searchFailures) {
-        super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal, indexFailures,
-            searchFailures);
+    public DataFrameIndexerTransformStats(DataFrameIndexerTransformStats other) {
+        this(other.transformId, other.numPages, other.numInputDocuments, other.numOuputDocuments, other.numInvocations,
+            other.indexTime, other.searchTime, other.indexTotal, other.searchTotal, other.indexFailures, other.searchFailures);
     }
 
     public DataFrameIndexerTransformStats(StreamInput in) throws IOException {
         super(in);
+        transformId = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(transformId);
+    }
+
+    /**
+     * Get the persisted stats document name from the Data Frame Transform Id.
+     *
+     * @return The id of the document where the transform stats are persisted
+     */
+    public static String documentId(String transformId) {
+        return NAME + "-" + transformId;
+    }
+
+    @Nullable
+    public String getTransformId() {
+        return transformId;
     }
 
     @Override
@@ -74,11 +134,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         builder.field(SEARCH_TIME_IN_MS.getPreferredName(), searchTime);
         builder.field(SEARCH_TOTAL.getPreferredName(), searchTotal);
         builder.field(SEARCH_FAILURES.getPreferredName(), searchFailures);
+        if (params.paramAsBoolean(DataFrameField.FOR_INTERNAL_STORAGE, false)) {
+            // If we are storing something, it should have a valid transform ID.
+            if (transformId.equals(DEFAULT_TRANSFORM_ID)) {
+                throw new IllegalArgumentException("when storing transform statistics, a valid transform id must be provided");
+            }
+            builder.field(DataFrameField.ID.getPreferredName(), transformId);
+            builder.field(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), NAME);
+        }
         builder.endObject();
         return builder;
     }
 
     public DataFrameIndexerTransformStats merge(DataFrameIndexerTransformStats other) {
+        // We should probably not merge two sets of stats unless one is an accumulation object (i.e. with the default transform id)
+        // or the stats are referencing the same transform
+        assert transformId.equals(DEFAULT_TRANSFORM_ID) || this.transformId.equals(other.transformId);
         numPages += other.numPages;
         numInputDocuments += other.numInputDocuments;
         numOuputDocuments += other.numOuputDocuments;
@@ -93,6 +164,37 @@ public DataFrameIndexerTransformStats merge(DataFrameIndexerTransformStats other
         return this;
     }
 
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+
+        DataFrameIndexerTransformStats that = (DataFrameIndexerTransformStats) other;
+
+        return Objects.equals(this.transformId, that.transformId)
+            && Objects.equals(this.numPages, that.numPages)
+            && Objects.equals(this.numInputDocuments, that.numInputDocuments)
+            && Objects.equals(this.numOuputDocuments, that.numOuputDocuments)
+            && Objects.equals(this.numInvocations, that.numInvocations)
+            && Objects.equals(this.indexTime, that.indexTime)
+            && Objects.equals(this.searchTime, that.searchTime)
+            && Objects.equals(this.indexFailures, that.indexFailures)
+            && Objects.equals(this.searchFailures, that.searchFailures)
+            && Objects.equals(this.indexTotal, that.indexTotal)
+            && Objects.equals(this.searchTotal, that.searchTotal);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(transformId, numPages, numInputDocuments, numOuputDocuments, numInvocations,
+            indexTime, searchTime, indexFailures, searchFailures, indexTotal, searchTotal);
+    }
+
     public static DataFrameIndexerTransformStats fromXContent(XContentParser parser) {
         try {
             return PARSER.parse(parser, null);
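Two behaviours worth noting in the stats class above: persisted stats documents are keyed by documentId(transformId), and merge(...) now asserts that only stats for the same transform (or the "_all" accumulator) are combined. A rough sketch derived from that code; statsOfTransformA and statsOfTransformB are hypothetical instances for two different transforms:

----------------------------------------------
// Document naming, per documentId() above:
String docId = DataFrameIndexerTransformStats.documentId("my_transform");
// -> "data_frame_indexer_transform_stats-my_transform"

// Accumulating across transforms goes through the default-id object:
DataFrameIndexerTransformStats total =
        DataFrameIndexerTransformStats.withDefaultTransformId();
total.merge(statsOfTransformA);  // fine: accumulator uses the "_all" id
total.merge(statsOfTransformB);  // fine for the same reason
// statsOfTransformA.merge(statsOfTransformB) would trip the new assertion
----------------------------------------------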
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStats.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStats.java
@@ -40,9 +40,13 @@ public class DataFrameTransformStateAndStats implements Writeable, ToXContentObj
     }
 
     public static DataFrameTransformStateAndStats initialStateAndStats(String id) {
+        return initialStateAndStats(id, new DataFrameIndexerTransformStats(id));
+    }
+
+    public static DataFrameTransformStateAndStats initialStateAndStats(String id, DataFrameIndexerTransformStats indexerTransformStats) {
         return new DataFrameTransformStateAndStats(id,
             new DataFrameTransformState(DataFrameTransformTaskState.STOPPED, IndexerState.STOPPED, null, 0L, null),
-            new DataFrameIndexerTransformStats());
+            indexerTransformStats);
     }
 
     public DataFrameTransformStateAndStats(String id, DataFrameTransformState state, DataFrameIndexerTransformStats stats) {
@@ -62,7 +66,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         builder.startObject();
         builder.field(DataFrameField.ID.getPreferredName(), id);
         builder.field(STATE_FIELD.getPreferredName(), transformState);
-        builder.field(DataFrameField.STATS_FIELD.getPreferredName(), transformStats);
+        builder.field(DataFrameField.STATS_FIELD.getPreferredName(), transformStats, params);
         builder.endObject();
         return builder;
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java
index 7982cffb01de5..8585e4122e673 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java
@@ -73,7 +73,8 @@ static AggProvider fromStream(StreamInput in) throws IOException {
         } else if (in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects
             return new AggProvider(in.readMap(), null, null);
         } else { // only supports eagerly parsed objects
-            return AggProvider.fromParsedAggs(in.readOptionalWriteable(AggregatorFactories.Builder::new));
+            // Upstream, we have read the bool already and know for sure that we have parsed aggs in the stream
+            return AggProvider.fromParsedAggs(new AggregatorFactories.Builder(in));
         }
     }
@@ -111,7 +112,8 @@ public void writeTo(StreamOutput out) throws IOException {
                 // actually are aggregations defined
                 throw new ElasticsearchException("Unsupported operation: parsed aggregations are null");
             }
-            out.writeOptionalWriteable(parsedAggs);
+            // Upstream we already verified that this calling object is not null, no need to write a second boolean to the stream
+            parsedAggs.writeTo(out);
         }
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java
index 3cd071f61aaee..810d97df34636 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java
@@ -212,6 +212,7 @@ public DatafeedConfig(StreamInput in) throws IOException {
         }
         // each of these writables are version aware
         this.queryProvider = QueryProvider.fromStream(in);
+        // This reads a boolean from the stream; if true, it sends the stream to the `fromStream` method
        this.aggProvider = in.readOptionalWriteable(AggProvider::fromStream);
 
         if (in.readBoolean()) {
@@ -420,6 +421,7 @@ public void writeTo(StreamOutput out) throws IOException {
         // Each of these writables are version aware
         queryProvider.writeTo(out); // never null
+        // This writes a boolean to the stream; if true, it sends the stream to the `writeTo` method
         out.writeOptionalWriteable(aggProvider);
 
         if (scriptFields != null) {
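The comments added in both files describe the same wire contract: writeOptionalWriteable prefixes the payload with a presence boolean, and readOptionalWriteable consumes that boolean before delegating to the reader. A hand-rolled equivalent, for illustration only, which also shows why AggProvider.writeTo must not write its own presence boolean (the bug being fixed):

----------------------------------------------
// Illustrative expansion of the optional-writeable pattern.
if (aggProvider == null) {
    out.writeBoolean(false);
} else {
    out.writeBoolean(true);
    aggProvider.writeTo(out);   // must NOT emit a second presence boolean
}
// ...and on the reading side:
AggProvider provider = in.readBoolean() ? AggProvider.fromStream(in) : null;
----------------------------------------------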
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java
index a06eacefcf81f..618414426f085 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java
@@ -107,6 +107,10 @@ protected void innerXContent(XContentBuilder builder, Params params) throws IOEx
             builder.field(AUDIT_XFIELD, auditUsage);
             builder.field(IP_FILTER_XFIELD, ipFilterUsage);
             builder.field(ANONYMOUS_XFIELD, anonymousUsage);
+        } else if (sslUsage.isEmpty() == false) {
+            // A trial (or basic) license can have SSL without security.
+            // This is because security defaults to disabled on that license, but that dynamic-default does not disable SSL.
+            builder.field(SSL_XFIELD, sslUsage);
         }
     }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java
index 168adaa111658..ae036b63162f0 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java
@@ -137,7 +137,7 @@ public void readFrom(StreamInput in) throws IOException {
         this.name = in.readString();
         this.enabled = in.readBoolean();
         this.roles = in.readStringList();
-        if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
+        if (in.getVersion().onOrAfter(Version.V_7_1_0)) {
             this.roleTemplates = in.readList(TemplateRoleName::new);
         }
         this.rules = ExpressionParser.readExpression(in);
@@ -151,7 +151,7 @@ public void writeTo(StreamOutput out) throws IOException {
         out.writeString(name);
         out.writeBoolean(enabled);
         out.writeStringCollection(roles);
-        if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
+        if (out.getVersion().onOrAfter(Version.V_7_1_0)) {
             out.writeList(roleTemplates);
         }
         ExpressionParser.writeExpression(rules, out);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java
index dd5fb08fa14b7..1564b46760ce4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/ExpressionRoleMapping.java
@@ -91,7 +91,7 @@ public ExpressionRoleMapping(StreamInput in) throws IOException {
         this.name = in.readString();
         this.enabled = in.readBoolean();
         this.roles = in.readStringList();
-        if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
+        if (in.getVersion().onOrAfter(Version.V_7_1_0)) {
             this.roleTemplates = in.readList(TemplateRoleName::new);
         } else {
             this.roleTemplates = Collections.emptyList();
@@ -105,7 +105,7 @@ public void writeTo(StreamOutput out) throws IOException {
         out.writeString(name);
         out.writeBoolean(enabled);
         out.writeStringCollection(roles);
-        if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
+        if (out.getVersion().onOrAfter(Version.V_7_1_0)) {
             out.writeList(roleTemplates);
         }
         ExpressionParser.writeExpression(expression, out);
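Dropping the gate from V_8_0_0 to V_7_1_0 in both classes means role templates now ride the wire to and from any 7.1.0+ node, while older nodes simply never see the field. For reference, the general version-gating pattern these hunks follow (consolidated sketch of the code above):

----------------------------------------------
// Writer: only emit the new field when the receiving node understands it.
if (out.getVersion().onOrAfter(Version.V_7_1_0)) {
    out.writeList(roleTemplates);
}
// Reader: mirror image, defaulting when talking to an older node.
if (in.getVersion().onOrAfter(Version.V_7_1_0)) {
    this.roleTemplates = in.readList(TemplateRoleName::new);
} else {
    this.roleTemplates = Collections.emptyList();
}
----------------------------------------------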
diff --git a/x-pack/plugin/core/src/main/resources/security-index-template.json b/x-pack/plugin/core/src/main/resources/security-index-template.json
index f4e3cd6db020d..8d567df5a514f 100644
--- a/x-pack/plugin/core/src/main/resources/security-index-template.json
+++ b/x-pack/plugin/core/src/main/resources/security-index-template.json
@@ -69,7 +69,7 @@
       },
       "metadata" : {
         "type" : "object",
-        "dynamic" : true
+        "dynamic" : false
       },
       "enabled": {
         "type": "boolean"
@@ -189,7 +189,7 @@
       },
       "metadata" : {
         "type" : "object",
-        "dynamic" : true
+        "dynamic" : false
       },
       "realm" : {
         "type" : "keyword"
@@ -198,7 +198,7 @@
       },
       "rules" : {
         "type" : "object",
-        "dynamic" : true
+        "dynamic" : false
       },
       "refresh_token" : {
         "type" : "object",
@@ -253,7 +253,7 @@
       },
       "metadata" : {
         "type" : "object",
-        "dynamic" : true
+        "dynamic" : false
       },
       "authentication" : {
         "type" : "binary"
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStatsTests.java
index 45b44882d7861..54fc5d5d45dc5 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStatsTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStatsTests.java
@@ -7,12 +7,19 @@
 package org.elasticsearch.xpack.core.dataframe.transforms;
 
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractSerializingTestCase;
+import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 
 import java.io.IOException;
+import java.util.Collections;
 
 public class DataFrameIndexerTransformStatsTests extends AbstractSerializingTestCase {
+
+    protected static ToXContent.Params TO_XCONTENT_PARAMS = new ToXContent.MapParams(
+        Collections.singletonMap(DataFrameField.FOR_INTERNAL_STORAGE, "true"));
+
     @Override
     protected DataFrameIndexerTransformStats createTestInstance() {
         return randomStats();
@@ -29,21 +36,32 @@ protected DataFrameIndexerTransformStats doParseInstance(XContentParser parser)
     }
 
     public static DataFrameIndexerTransformStats randomStats() {
-        return new DataFrameIndexerTransformStats(randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
-            randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
-            randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L));
+        return randomStats(randomAlphaOfLength(10));
+    }
+
+    public static DataFrameIndexerTransformStats randomStats(String transformId) {
+        return new DataFrameIndexerTransformStats(transformId, randomLongBetween(10L, 10000L),
+            randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
+            randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L),
+            randomLongBetween(0L, 10000L));
+    }
+
+    @Override
+    protected ToXContent.Params getToXContentParams() {
+        return TO_XCONTENT_PARAMS;
     }
 
     public void testMerge() throws IOException {
-        DataFrameIndexerTransformStats emptyStats = new DataFrameIndexerTransformStats();
-        DataFrameIndexerTransformStats randomStats = randomStats();
+        String transformId = randomAlphaOfLength(10);
+        DataFrameIndexerTransformStats emptyStats = new DataFrameIndexerTransformStats(transformId);
+        DataFrameIndexerTransformStats randomStats = randomStats(transformId);
 
         assertEquals(randomStats, emptyStats.merge(randomStats));
         assertEquals(randomStats, randomStats.merge(emptyStats));
 
         DataFrameIndexerTransformStats randomStatsClone = copyInstance(randomStats);
 
-        DataFrameIndexerTransformStats trippleRandomStats = new DataFrameIndexerTransformStats(3 * randomStats.getNumPages(),
+        DataFrameIndexerTransformStats trippleRandomStats = new DataFrameIndexerTransformStats(transformId, 3 * randomStats.getNumPages(),
             3 * randomStats.getNumDocuments(), 3 * randomStats.getOutputDocuments(), 3 * randomStats.getNumInvocations(),
             3 * randomStats.getIndexTime(), 3 * randomStats.getSearchTime(), 3 * randomStats.getIndexTotal(),
             3 * randomStats.getSearchTotal(), 3 * randomStats.getIndexFailures(), 3 * randomStats.getSearchFailures());
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStatsTests.java
index 266967e27b903..4f80d0d0b453c 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStatsTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStatsTests.java
@@ -7,22 +7,26 @@
 package org.elasticsearch.xpack.core.dataframe.transforms;
 
 import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 
 import java.io.IOException;
+import java.util.Collections;
 
 public class DataFrameTransformStateAndStatsTests extends AbstractSerializingDataFrameTestCase {
 
+    protected static ToXContent.Params TO_XCONTENT_PARAMS = new ToXContent.MapParams(
+        Collections.singletonMap(DataFrameField.FOR_INTERNAL_STORAGE, "true"));
+
     public static DataFrameTransformStateAndStats randomDataFrameTransformStateAndStats(String id) {
         return new DataFrameTransformStateAndStats(id,
             DataFrameTransformStateTests.randomDataFrameTransformState(),
-            DataFrameIndexerTransformStatsTests.randomStats());
+            DataFrameIndexerTransformStatsTests.randomStats(id));
     }
 
     public static DataFrameTransformStateAndStats randomDataFrameTransformStateAndStats() {
-        return new DataFrameTransformStateAndStats(randomAlphaOfLengthBetween(1, 10),
-            DataFrameTransformStateTests.randomDataFrameTransformState(),
-            DataFrameIndexerTransformStatsTests.randomStats());
+        return randomDataFrameTransformStateAndStats(randomAlphaOfLengthBetween(1, 10));
     }
 
     @Override
@@ -30,6 +34,13 @@ protected DataFrameTransformStateAndStats doParseInstance(XContentParser parser)
         return DataFrameTransformStateAndStats.PARSER.apply(parser, null);
     }
 
+    @Override
+    // Setting params for internal storage so that we can check XContent equivalence, as
+    // DataFrameIndexerTransformStats does not write the ID to the XContentObject unless it is for internal storage
+    protected ToXContent.Params getToXContentParams() {
+        return TO_XCONTENT_PARAMS;
+    }
+
     @Override
     protected DataFrameTransformStateAndStats createTestInstance() {
         return randomDataFrameTransformStateAndStats();
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateHlrcTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateHlrcTests.java
new file mode 100644
index 0000000000000..4f1c6b1f7615e
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateHlrcTests.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.dataframe.transforms;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.AbstractHlrcXContentTestCase;
+import org.elasticsearch.xpack.core.indexing.IndexerState;
+
+import java.io.IOException;
+import java.util.function.Predicate;
+
+public class DataFrameTransformStateHlrcTests extends AbstractHlrcXContentTestCase {
+
+    @Override
+    public org.elasticsearch.client.dataframe.transforms.DataFrameTransformState doHlrcParseInstance(XContentParser parser)
+        throws IOException {
+        return org.elasticsearch.client.dataframe.transforms.DataFrameTransformState.fromXContent(parser);
+    }
+
+    @Override
+    public DataFrameTransformState convertHlrcToInternal(org.elasticsearch.client.dataframe.transforms.DataFrameTransformState instance) {
+        return new DataFrameTransformState(DataFrameTransformTaskState.fromString(instance.getTaskState().value()),
+            IndexerState.fromString(instance.getIndexerState().value()),
+            instance.getPosition(), instance.getGeneration(), instance.getReason());
+    }
+
+    @Override
+    protected DataFrameTransformState createTestInstance() {
+        return DataFrameTransformStateTests.randomDataFrameTransformState();
+    }
+
+    @Override
+    protected DataFrameTransformState doParseInstance(XContentParser parser) throws IOException {
+        return DataFrameTransformState.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Predicate getRandomFieldsExcludeFilter() {
+        return field -> field.equals("current_position");
+    }
+}
diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java
index 2367e255cd9ba..9884c9bb6793b 100644
--- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java
+++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java
@@ -17,7 +17,9 @@
 
 import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.is;
 
 public class DataFrameAuditorIT extends DataFrameRestTestCase {
@@ -49,7 +51,6 @@ public void createIndexes() throws IOException {
         setupUser(TEST_USER_NAME, Arrays.asList("data_frame_transforms_admin", DATA_ACCESS_ROLE));
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40594")
     @SuppressWarnings("unchecked")
     public void testAuditorWritesAudits() throws Exception {
         String transformId = "simplePivotForAudit";
@@ -62,17 +63,26 @@ public void testAuditorWritesAudits() throws Exception {
         startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS);
 
         // Make sure we wrote to the audit
-        assertTrue(indexExists(DataFrameInternalIndex.AUDIT_INDEX));
-        refreshIndex(DataFrameInternalIndex.AUDIT_INDEX);
-        Request request = new Request("GET", DataFrameInternalIndex.AUDIT_INDEX + "/_search");
+        final Request request = new Request("GET", DataFrameInternalIndex.AUDIT_INDEX + "/_search");
         request.setJsonEntity("{\"query\":{\"term\":{\"transform_id\":\"simplePivotForAudit\"}}}");
-        Map response = entityAsMap(client().performRequest(request));
-        Map hitRsp = (Map) ((List) ((Map)response.get("hits")).get("hits")).get(0);
-        Map source = (Map)hitRsp.get("_source");
-        assertThat(source.get("transform_id"), equalTo(transformId));
-        assertThat(source.get("level"), equalTo("info"));
-        assertThat(source.get("message"), is(notNullValue()));
-        assertThat(source.get("node_name"), is(notNullValue()));
-        assertThat(source.get("timestamp"), is(notNullValue()));
+        assertBusy(() -> {
+            assertTrue(indexExists(DataFrameInternalIndex.AUDIT_INDEX));
+        });
+        // Since calls that write to the Auditor are sent and forgotten (async), we could have returned from the start,
+        // finished the job (as this is a very short DF job), all without the audit being fully written.
+        assertBusy(() -> {
+            refreshIndex(DataFrameInternalIndex.AUDIT_INDEX);
+            Map response = entityAsMap(client().performRequest(request));
+            List hitList = ((List) ((Map)response.get("hits")).get("hits"));
+            assertThat(hitList, is(not(empty())));
+            Map hitRsp = (Map) hitList.get(0);
+            Map source = (Map)hitRsp.get("_source");
+            assertThat(source.get("transform_id"), equalTo(transformId));
+            assertThat(source.get("level"), equalTo("info"));
+            assertThat(source.get("message"), is(notNullValue()));
+            assertThat(source.get("node_name"), is(notNullValue()));
+            assertThat(source.get("timestamp"), is(notNullValue()));
+        });
+    }
 }
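The rewritten test leans on assertBusy from the Elasticsearch test framework, which retries an assertion block until it stops throwing or a timeout expires; it is the standard tool whenever the asserted state is produced asynchronously, as with the fire-and-forget audit writes here. A small usage sketch reusing names from the test above:

----------------------------------------------
// Retries the lambda (with backoff) until no AssertionError is thrown,
// rethrowing the last failure if the default timeout expires.
assertBusy(() -> {
    refreshIndex(DataFrameInternalIndex.AUDIT_INDEX);
    assertTrue(indexExists(DataFrameInternalIndex.AUDIT_INDEX));
});
// An explicit-timeout variant also exists:
assertBusy(() -> assertTrue(indexExists(DataFrameInternalIndex.AUDIT_INDEX)),
    30, TimeUnit.SECONDS);
----------------------------------------------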
"pivot_reviews_stats_1"); + stopDataFrameTransform("pivot_stats_1", false); + + // Get rid of the first transform task, but keep the configuration + client().performRequest(new Request("POST", "_tasks/_cancel?actions="+DataFrameField.TASK_NAME+"*")); + + // Verify that the task is gone + Map tasks = + entityAsMap(client().performRequest(new Request("GET", "_tasks?actions="+DataFrameField.TASK_NAME+"*"))); + assertTrue(((Map)XContentMapValues.extractValue("nodes", tasks)).isEmpty()); + + createPivotReviewsTransform("pivot_stats_2", "pivot_reviews_stats_2", null); + startAndWaitForTransform("pivot_stats_2", "pivot_reviews_stats_2"); + + Request getRequest = createRequestWithAuth("GET", DATAFRAME_ENDPOINT + "_stats", BASIC_AUTH_VALUE_DATA_FRAME_ADMIN); + Map stats = entityAsMap(client().performRequest(getRequest)); + assertEquals(2, XContentMapValues.extractValue("count", stats)); + List> transformsStats = (List>)XContentMapValues.extractValue("transforms", stats); + // Verify that both transforms, the one with the task and the one without have statistics + for (Map transformStats : transformsStats) { + Map stat = (Map)transformStats.get("stats"); + assertThat(((Integer)stat.get("documents_processed")), greaterThan(0)); + assertThat(((Integer)stat.get("search_time_in_ms")), greaterThan(0)); + assertThat(((Integer)stat.get("search_total")), greaterThan(0)); + assertThat(((Integer)stat.get("pages_processed")), greaterThan(0)); + } + } } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 99c08f1a50583..95daf11f674d3 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -268,7 +268,6 @@ public void testPreviewTransform() throws Exception { }); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40537") public void testPivotWithMaxOnDateField() throws Exception { String transformId = "simpleDateHistogramPivotWithMaxTime"; String dataFrameIndex = "pivot_reviews_via_date_histogram_with_max_time"; @@ -312,7 +311,7 @@ public void testPivotWithMaxOnDateField() throws Exception { Map searchResult = getAsMap(dataFrameIndex + "/_search?q=by_day:2017-01-15"); String actual = (String) ((List) XContentMapValues.extractValue("hits.hits._source.timestamp", searchResult)).get(0); // Do `containsString` as actual ending timestamp is indeterminate due to how data is generated - assertThat(actual, containsString("2017-01-15T20:")); + assertThat(actual, containsString("2017-01-15T")); } private void assertOnePivotValue(String query, double expected) throws IOException { diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java index 5fcdba603eebb..24ce173b37567 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java +++ 
b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java @@ -9,11 +9,19 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; import org.junit.Before; import java.io.IOException; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.core.dataframe.DataFrameField.INDEX_DOC_TYPE; +import static org.elasticsearch.xpack.dataframe.DataFrameFeatureSet.PROVIDED_STATS; + public class DataFrameUsageIT extends DataFrameRestTestCase { private boolean indicesCreated = false; @@ -45,22 +53,63 @@ public void testUsage() throws Exception { assertEquals(null, XContentMapValues.extractValue("data_frame.transforms", usageAsMap)); assertEquals(null, XContentMapValues.extractValue("data_frame.stats", usageAsMap)); - // create a transform + // create transforms createPivotReviewsTransform("test_usage", "pivot_reviews", null); + createPivotReviewsTransform("test_usage_no_task", "pivot_reviews_no_task", null); + createPivotReviewsTransform("test_usage_no_stats_or_task", "pivot_reviews_no_stats_or_task", null); usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); usageAsMap = entityAsMap(usageResponse); - assertEquals(1, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); - assertEquals(1, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap)); + assertEquals(3, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); + assertEquals(3, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap)); + + startAndWaitForTransform("test_usage_no_task", "pivot_reviews_no_task"); + stopDataFrameTransform("test_usage_no_task", false); + // Remove the task, we should still have the transform and its stat doc + client().performRequest(new Request("POST", "_tasks/_cancel?actions="+ DataFrameField.TASK_NAME+"*")); - // TODO remove as soon as stats are stored in an index instead of ClusterState with the task startAndWaitForTransform("test_usage", "pivot_reviews"); + Request statsExistsRequest = new Request("GET", + DataFrameInternalIndex.INDEX_NAME+"/_search?q=" + + INDEX_DOC_TYPE.getPreferredName() + ":" + + DataFrameIndexerTransformStats.NAME); + // Verify that we have our two stats documents + assertBusy(() -> { + Map hasStatsMap = entityAsMap(client().performRequest(statsExistsRequest)); + assertEquals(2, XContentMapValues.extractValue("hits.total.value", hasStatsMap)); + }); + + Request getRequest = new Request("GET", DATAFRAME_ENDPOINT + "test_usage/_stats"); + Map stats = entityAsMap(client().performRequest(getRequest)); + Map expectedStats = new HashMap<>(); + for(String statName : PROVIDED_STATS) { + @SuppressWarnings("unchecked") + List specificStatistic = ((List)XContentMapValues.extractValue("transforms.stats." 
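The usage assertions above lean on `XContentMapValues.extractValue`, which resolves a dot-separated path through nested maps. A minimal sketch of the idiom (the figure is illustrative, not from the test):

----
// Given a usage body shaped like {"data_frame": {"stats": {"documents_processed": 350}}}:
Object docsProcessed = XContentMapValues.extractValue("data_frame.stats.documents_processed", usageAsMap);
assertEquals(350, docsProcessed); // 350 stands in for the sum over all transforms
----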
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java
index 2cb7d59475444..029fe88766df5 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java
@@ -6,21 +6,41 @@
 
 package org.elasticsearch.xpack.dataframe;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
+import org.elasticsearch.search.aggregations.Aggregation;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
+import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.XPackFeatureSet;
 import org.elasticsearch.xpack.core.XPackField;
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.dataframe.DataFrameFeatureSetUsage;
+import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats;
-import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction;
-import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats;
-
+import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransform;
+import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig;
+import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState;
+import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState;
+import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -33,11 +53,28 @@ public class DataFrameFeatureSet implements XPackFeatureSet {
     private final boolean enabled;
     private final Client client;
     private final XPackLicenseState licenseState;
+    private final ClusterService clusterService;
+
+    private static final Logger logger = LogManager.getLogger(DataFrameFeatureSet.class);
+
+    public static final String[] PROVIDED_STATS = new String[] {
+        DataFrameIndexerTransformStats.NUM_PAGES.getPreferredName(),
+        DataFrameIndexerTransformStats.NUM_INPUT_DOCUMENTS.getPreferredName(),
+        DataFrameIndexerTransformStats.NUM_OUTPUT_DOCUMENTS.getPreferredName(),
+        DataFrameIndexerTransformStats.NUM_INVOCATIONS.getPreferredName(),
+        DataFrameIndexerTransformStats.INDEX_TIME_IN_MS.getPreferredName(),
+        DataFrameIndexerTransformStats.SEARCH_TIME_IN_MS.getPreferredName(),
+        DataFrameIndexerTransformStats.INDEX_TOTAL.getPreferredName(),
+        DataFrameIndexerTransformStats.SEARCH_TOTAL.getPreferredName(),
+        DataFrameIndexerTransformStats.INDEX_FAILURES.getPreferredName(),
+        DataFrameIndexerTransformStats.SEARCH_FAILURES.getPreferredName(),
+    };
 
     @Inject
-    public DataFrameFeatureSet(Settings settings, Client client, @Nullable XPackLicenseState licenseState) {
+    public DataFrameFeatureSet(Settings settings, ClusterService clusterService, Client client, @Nullable XPackLicenseState licenseState) {
         this.enabled = XPackSettings.DATA_FRAME_ENABLED.get(settings);
         this.client = Objects.requireNonNull(client);
+        this.clusterService = Objects.requireNonNull(clusterService);
         this.licenseState = licenseState;
     }
 
@@ -69,30 +106,127 @@ public Map<String, Object> nativeCodeInfo() {
     @Override
     public void usage(ActionListener<XPackFeatureSet.Usage> listener) {
         if (enabled == false) {
-            listener.onResponse(
-                new DataFrameFeatureSetUsage(available(), enabled(), Collections.emptyMap(), new DataFrameIndexerTransformStats()));
+            listener.onResponse(new DataFrameFeatureSetUsage(available(),
+                enabled(),
+                Collections.emptyMap(),
+                DataFrameIndexerTransformStats.withDefaultTransformId()));
             return;
         }
 
-        final GetDataFrameTransformsStatsAction.Request transformStatsRequest = new GetDataFrameTransformsStatsAction.Request(MetaData.ALL);
-        client.execute(GetDataFrameTransformsStatsAction.INSTANCE,
-            transformStatsRequest,
-            ActionListener.wrap(transformStatsResponse ->
-                listener.onResponse(createUsage(available(), enabled(), transformStatsResponse.getTransformsStateAndStats())),
-                listener::onFailure));
+        PersistentTasksCustomMetaData taskMetadata = PersistentTasksCustomMetaData.getPersistentTasksCustomMetaData(clusterService.state());
+        Collection<PersistentTasksCustomMetaData.PersistentTask<?>> dataFrameTasks = taskMetadata == null ?
+            Collections.emptyList() :
+            taskMetadata.findTasks(DataFrameTransform.NAME, (t) -> true);
+        final int taskCount = dataFrameTasks.size();
+        final Map<String, Long> transformsCountByState = new HashMap<>();
+        for(PersistentTasksCustomMetaData.PersistentTask<?> dataFrameTask : dataFrameTasks) {
+            DataFrameTransformState state = (DataFrameTransformState)dataFrameTask.getState();
+            transformsCountByState.merge(state.getTaskState().value(), 1L, Long::sum);
+        }
+
+        ActionListener<DataFrameIndexerTransformStats> totalStatsListener = ActionListener.wrap(
+            statSummations -> listener.onResponse(new DataFrameFeatureSetUsage(available(),
+                enabled(),
+                transformsCountByState,
+                statSummations)),
+            listener::onFailure
+        );
+
+        ActionListener<SearchResponse> totalTransformCountListener = ActionListener.wrap(
+            transformCountSuccess -> {
+                if (transformCountSuccess.getShardFailures().length > 0) {
+                    logger.error("total transform count search returned shard failures: {}",
+                        Arrays.toString(transformCountSuccess.getShardFailures()));
+                }
+                long totalTransforms = transformCountSuccess.getHits().getTotalHits().value;
+                if (totalTransforms == 0) {
+                    listener.onResponse(new DataFrameFeatureSetUsage(available(),
+                        enabled(),
+                        transformsCountByState,
+                        DataFrameIndexerTransformStats.withDefaultTransformId()));
+                    return;
+                }
+                transformsCountByState.merge(DataFrameTransformTaskState.STOPPED.value(), totalTransforms - taskCount, Long::sum);
+                getStatisticSummations(client, totalStatsListener);
+            },
+            transformCountFailure -> {
+                if (transformCountFailure instanceof ResourceNotFoundException) {
+                    getStatisticSummations(client, totalStatsListener);
+                } else {
+                    listener.onFailure(transformCountFailure);
+                }
+            }
+        );
+
+        SearchRequest totalTransformCount = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME)
+            .setTrackTotalHits(true)
+            .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery()
+                .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), DataFrameTransformConfig.NAME))))
+            .request();
+
+        ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(),
+            ClientHelper.DATA_FRAME_ORIGIN,
+            totalTransformCount,
+            totalTransformCountListener,
+            client::search);
     }
 
-    static DataFrameFeatureSetUsage createUsage(boolean available,
-                                                boolean enabled,
-                                                List<DataFrameTransformStateAndStats> transformsStateAndStats) {
+    static DataFrameIndexerTransformStats parseSearchAggs(SearchResponse searchResponse) {
+        List<Long> statisticsList = new ArrayList<>(PROVIDED_STATS.length);
 
-        Map<String, Long> transformsCountByState = new HashMap<>();
-        DataFrameIndexerTransformStats accumulatedStats = new DataFrameIndexerTransformStats();
-        transformsStateAndStats.forEach(singleResult -> {
-            transformsCountByState.merge(singleResult.getTransformState().getIndexerState().value(), 1L, Long::sum);
-            accumulatedStats.merge(singleResult.getTransformStats());
-        });
+        for(String statName : PROVIDED_STATS) {
+            Aggregation agg = searchResponse.getAggregations().get(statName);
+
+            if (agg instanceof NumericMetricsAggregation.SingleValue) {
+                statisticsList.add((long)((NumericMetricsAggregation.SingleValue)agg).value());
+            } else {
+                statisticsList.add(0L);
+            }
+        }
+        return DataFrameIndexerTransformStats.withDefaultTransformId(statisticsList.get(0),  // numPages
+            statisticsList.get(1),  // numInputDocuments
+            statisticsList.get(2),  // numOutputDocuments
+            statisticsList.get(3),  // numInvocations
+            statisticsList.get(4),  // indexTime
+            statisticsList.get(5),  // searchTime
+            statisticsList.get(6),  // indexTotal
+            statisticsList.get(7),  // searchTotal
+            statisticsList.get(8),  // indexFailures
+            statisticsList.get(9)); // searchFailures
+    }
+
+    static void getStatisticSummations(Client client, ActionListener<DataFrameIndexerTransformStats> statsListener) {
+        QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery()
+            .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(),
+                DataFrameIndexerTransformStats.NAME)));
+
+        SearchRequestBuilder requestBuilder = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME)
+            .setSize(0)
+            .setQuery(queryBuilder);
+
+        for(String statName : PROVIDED_STATS) {
+            requestBuilder.addAggregation(AggregationBuilders.sum(statName).field(statName));
+        }
 
-        return new DataFrameFeatureSetUsage(available, enabled, transformsCountByState, accumulatedStats);
+        ActionListener<SearchResponse> getStatisticSummationsListener = ActionListener.wrap(
+            searchResponse -> {
+                if (searchResponse.getShardFailures().length > 0) {
+                    logger.error("statistics summations search returned shard failures: {}",
+                        Arrays.toString(searchResponse.getShardFailures()));
+                }
+                statsListener.onResponse(parseSearchAggs(searchResponse));
+            },
+            failure -> {
+                if (failure instanceof ResourceNotFoundException) {
+                    statsListener.onResponse(DataFrameIndexerTransformStats.withDefaultTransformId());
+                } else {
+                    statsListener.onFailure(failure);
+                }
+            }
+        );
+        ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(),
+            ClientHelper.DATA_FRAME_ORIGIN,
+            requestBuilder.request(),
+            getStatisticSummationsListener,
+            client::search);
     }
 }
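`getStatisticSummations` above sends a single search whose only payload is one sum aggregation per entry in `PROVIDED_STATS`, with the aggregation name equal to the field name. Reduced to a single field, the request looks roughly like this (a sketch, not the exact request the feature set builds):

----
// Hits are not needed, only the aggregation totals, hence size 0.
SearchRequest request = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME)
        .setSize(0)
        .addAggregation(AggregationBuilders.sum("search_time_in_ms").field("search_time_in_ms"))
        .request();
// The returned aggregation implements NumericMetricsAggregation.SingleValue, so the total is read as
// (long) ((NumericMetricsAggregation.SingleValue) response.getAggregations().get("search_time_in_ms")).value()
----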
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java
index b751279abf233..c2f64dd66a583 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java
@@ -6,17 +6,21 @@
 
 package org.elasticsearch.xpack.dataframe.action;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionListenerResponseHandler;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.tasks.TransportTasksAction;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -27,28 +31,29 @@
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.discovery.MasterNotDiscoveredException;
+import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction;
 import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction.Request;
 import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction.Response;
-import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig;
+import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats;
 import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats;
 import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex;
-import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils;
 import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager;
 import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformTask;
-import org.elasticsearch.xpack.dataframe.util.BatchedDataIterator;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashSet;
@@ -62,6 +67,8 @@ public class TransportGetDataFrameTransformsStatsAction extends TransportTasksAction<DataFrameTransformTask, Request,
         GetDataFrameTransformsStatsAction.Response, GetDataFrameTransformsStatsAction.Response> {
 
+    private static final Logger logger = LogManager.getLogger(TransportGetDataFrameTransformsStatsAction.class);
+
     private final Client client;
     private final DataFrameTransformsConfigManager dataFrameTransformsConfigManager;
 
     @Inject
@@ -88,8 +95,6 @@ protected Response newResponse(Request request, List<Response> tasks, List<TaskOperationFailure> taskOperationFailures,
     protected void taskOperation(Request request, DataFrameTransformTask task, ActionListener<Response> listener) {
         List<DataFrameTransformStateAndStats> transformsStateAndStats = Collections.emptyList();
 
-        assert task.getTransformId().equals(request.getId()) || request.getId().equals(MetaData.ALL);
-
         // Little extra insurance, make sure we only return transforms that aren't cancelled
         if (task.isCancelled() == false) {
             DataFrameTransformStateAndStats transformStateAndStats = new DataFrameTransformStateAndStats(task.getTransformId(),
@@ -101,139 +106,115 @@ protected void taskOperation(Request request, DataFrameTransformTask task, ActionListener<Response> listener) {
     }
 
     @Override
-    // TODO gather stats from docs when moved out of allocated task
-    protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
+    protected void doExecute(Task task, Request request, ActionListener<Response> finalListener) {
         final ClusterState state = clusterService.state();
         final DiscoveryNodes nodes = state.nodes();
-
         if (nodes.isLocalNodeElectedMaster()) {
-            if (DataFramePersistentTaskUtils.stateHasDataFrameTransforms(request.getId(), state)) {
-                ActionListener<Response> transformStatsListener = ActionListener.wrap(
-                    response -> collectStatsForTransformsWithoutTasks(request, response, listener),
-                    listener::onFailure
-                );
-                super.doExecute(task, request, transformStatsListener);
-            } else {
-                // If we don't have any tasks, pass empty collection to this method
-                collectStatsForTransformsWithoutTasks(request, new Response(Collections.emptyList()), listener);
-            }
-
+            dataFrameTransformsConfigManager.expandTransformIds(request.getId(), request.getPageParams(), ActionListener.wrap(
+                ids -> {
+                    request.setExpandedIds(ids);
+                    super.doExecute(task, request, ActionListener.wrap(
+                        response -> collectStatsForTransformsWithoutTasks(request, response, finalListener),
+                        finalListener::onFailure
+                    ));
+                },
+                e -> {
+                    // If the index to search, or the individual config is not there, just return empty
+                    if (e instanceof ResourceNotFoundException) {
+                        finalListener.onResponse(new Response(Collections.emptyList()));
+                    } else {
+                        finalListener.onFailure(e);
+                    }
+                }
+            ));
         } else {
             // Delegates GetTransforms to elected master node, so it becomes the coordinating node.
             // Non-master nodes may have a stale cluster state that shows transforms which are cancelled
             // on the master, which makes testing difficult.
             if (nodes.getMasterNode() == null) {
-                listener.onFailure(new MasterNotDiscoveredException("no known master nodes"));
+                finalListener.onFailure(new MasterNotDiscoveredException("no known master nodes"));
             } else {
                 transportService.sendRequest(nodes.getMasterNode(), actionName, request,
-                    new ActionListenerResponseHandler<>(listener, Response::new));
+                    new ActionListenerResponseHandler<>(finalListener, Response::new));
             }
         }
     }
 
-    // TODO correct when we start storing stats in docs, right now, just return STOPPED and empty stats
     private void collectStatsForTransformsWithoutTasks(Request request, Response response, ActionListener<Response> listener) {
-        if (request.getId().equals(MetaData.ALL) == false) {
-            // If we did not find any tasks && this is NOT for ALL, verify that the single config exists, and return as stopped
-            // Empty other wise
-            if (response.getTransformsStateAndStats().isEmpty()) {
-                dataFrameTransformsConfigManager.getTransformConfiguration(request.getId(), ActionListener.wrap(
-                    config ->
-                        listener.onResponse(
-                            new Response(Collections.singletonList(DataFrameTransformStateAndStats.initialStateAndStats(config.getId())))),
-                    exception -> {
-                        if (exception instanceof ResourceNotFoundException) {
-                            listener.onResponse(new Response(Collections.emptyList()));
-                        } else {
-                            listener.onFailure(exception);
-                        }
-                    }
-                ));
-            } else {
-                // If it was not ALL && we DO have stored stats, simply return those as we found them all, since we only support 1 or all
-                listener.onResponse(response);
-            }
+        // We gathered all there is, no need to continue
+        if (request.getExpandedIds().size() == response.getTransformsStateAndStats().size()) {
+            listener.onResponse(response);
             return;
         }
-        // We only do this mass collection if we are getting ALL tasks
-        TransformIdCollector collector = new TransformIdCollector();
-        collector.execute(ActionListener.wrap(
-            allIds -> {
-                response.getTransformsStateAndStats().forEach(
-                    tsas -> allIds.remove(tsas.getId())
-                );
-                List<DataFrameTransformStateAndStats> statsWithoutTasks = allIds.stream()
-                    .map(DataFrameTransformStateAndStats::initialStateAndStats)
-                    .collect(Collectors.toList());
-                statsWithoutTasks.addAll(response.getTransformsStateAndStats());
-                statsWithoutTasks.sort(Comparator.comparing(DataFrameTransformStateAndStats::getId));
-                listener.onResponse(new Response(statsWithoutTasks));
-            },
-            listener::onFailure
-        ));
-    }
-
-    /**
-     * This class recursively queries a scroll search over all transform_ids and puts them in a set
-     */
-    private class TransformIdCollector extends BatchedDataIterator<String, Set<String>> {
-
-        private final Set<String> ids = new HashSet<>();
-        TransformIdCollector() {
-            super(client, DataFrameInternalIndex.INDEX_NAME);
-        }
 
-        void execute(final ActionListener<Set<String>> finalListener) {
-            if (this.hasNext()) {
-                next(ActionListener.wrap(
-                    setOfIds -> execute(finalListener),
-                    finalListener::onFailure
-                ));
-            } else {
-                finalListener.onResponse(ids);
-            }
-        }
+        Set<String> transformsWithoutTasks = new HashSet<>(request.getExpandedIds());
+        transformsWithoutTasks.removeAll(response.getTransformsStateAndStats().stream().map(DataFrameTransformStateAndStats::getId)
+            .collect(Collectors.toList()));
+
+        // Small assurance that we stay below the max. The terms query has a hard limit of 10k terms, so we should at least be below that.
+        assert transformsWithoutTasks.size() <= Request.MAX_SIZE_RETURN;
+
+        ActionListener<SearchResponse> searchStatsListener = ActionListener.wrap(
+            searchResponse -> {
+                List<ElasticsearchException> nodeFailures = new ArrayList<>(response.getNodeFailures());
+                if (searchResponse.getShardFailures().length > 0) {
+                    String msg = "transform statistics document search returned shard failures: " +
+                        Arrays.toString(searchResponse.getShardFailures());
                    logger.error(msg);
+                    nodeFailures.add(new ElasticsearchException(msg));
+                }
+                List<DataFrameTransformStateAndStats> allStateAndStats = response.getTransformsStateAndStats();
+                for(SearchHit hit : searchResponse.getHits().getHits()) {
+                    BytesReference source = hit.getSourceRef();
+                    try {
+                        DataFrameIndexerTransformStats stats = parseFromSource(source);
+                        allStateAndStats.add(DataFrameTransformStateAndStats.initialStateAndStats(stats.getTransformId(), stats));
+                        transformsWithoutTasks.remove(stats.getTransformId());
+                    } catch (IOException e) {
+                        listener.onFailure(new ElasticsearchParseException("Could not parse data frame transform stats", e));
+                        return;
+                    }
+                }
+                transformsWithoutTasks.forEach(transformId ->
+                    allStateAndStats.add(DataFrameTransformStateAndStats.initialStateAndStats(transformId)));
 
-        @Override
-        protected QueryBuilder getQuery() {
-            return QueryBuilders.boolQuery()
-                .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), DataFrameTransformConfig.NAME));
-        }
+                // Any transform in the collection could lack a task, so, even though the list is initially sorted
+                // it can easily become arbitrarily ordered based on which transforms don't have a task or stats docs
+                allStateAndStats.sort(Comparator.comparing(DataFrameTransformStateAndStats::getId));
 
-        @Override
-        protected String map(SearchHit hit) {
-            BytesReference source = hit.getSourceRef();
-            try (InputStream stream = source.streamInput();
-                 XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY,
-                     LoggingDeprecationHandler.INSTANCE, stream)) {
-                return (String)parser.map().get(DataFrameField.ID.getPreferredName());
-            } catch (IOException e) {
-                throw new ElasticsearchParseException("failed to parse bucket", e);
+                listener.onResponse(new Response(allStateAndStats, response.getTaskFailures(), nodeFailures));
+            },
+            e -> {
+                if (e instanceof IndexNotFoundException) {
+                    listener.onResponse(response);
+                } else {
+                    listener.onFailure(e);
+                }
             }
-        }
+        );
 
-        @Override
-        protected Set<String> getCollection() {
-            return ids;
-        }
+        QueryBuilder builder = QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery()
+            .filter(QueryBuilders.termsQuery(DataFrameField.ID.getPreferredName(), transformsWithoutTasks))
+            .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), DataFrameIndexerTransformStats.NAME)));
 
-        @Override
-        protected SortOrder sortOrder() {
-            return SortOrder.ASC;
-        }
+        SearchRequest searchRequest = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME)
+            .addSort(DataFrameField.ID.getPreferredName(), SortOrder.ASC)
+            .setQuery(builder)
+            .request();
 
-        @Override
-        protected String sortField() {
-            return DataFrameField.ID.getPreferredName();
-        }
+        ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(),
+            ClientHelper.DATA_FRAME_ORIGIN,
+            searchRequest,
+            searchStatsListener, client::search);
+    }
 
-        @Override
-        protected FetchSourceContext getFetchSourceContext() {
-            return new FetchSourceContext(true, new String[]{DataFrameField.ID.getPreferredName()}, new String[]{});
+    private static DataFrameIndexerTransformStats parseFromSource(BytesReference source) throws IOException {
+        try (InputStream stream = source.streamInput();
+             XContentParser parser = XContentFactory.xContent(XContentType.JSON)
+                 .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
+            return DataFrameIndexerTransformStats.PARSER.apply(parser, null);
        }
    }
-
-
 }
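To summarize the flow that the new `collectStatsForTransformsWithoutTasks` implements, the response is assembled from three sources before it is returned (comments only; the wiring is all in the method above):

----
// 1. Running tasks: live DataFrameTransformStateAndStats gathered by taskOperation(...).
// 2. Persisted stats documents: parseFromSource(...) for transforms that have no task.
// 3. Defaults: initialStateAndStats(id) for transforms with neither a task nor a stats doc.
// The combined list is then re-sorted by id so paging over _stats stays stable:
allStateAndStats.sort(Comparator.comparing(DataFrameTransformStateAndStats::getId));
----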
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java
index 78f6823034811..63b2ed720c0be 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPreviewDataFrameTransformAction.java
@@ -81,7 +81,7 @@ private void getPreview(Pivot pivot, ActionListener<List<Map<String, Object>>> listener) {
             ActionListener.wrap(
                 r -> {
                     final CompositeAggregation agg = r.getAggregations().get(COMPOSITE_AGGREGATION_NAME);
-                    DataFrameIndexerTransformStats stats = new DataFrameIndexerTransformStats();
+                    DataFrameIndexerTransformStats stats = DataFrameIndexerTransformStats.withDefaultTransformId();
                     // remove all internal fields
                     List<Map<String, Object>> results = pivot.extractResults(agg, deducedMappings, stats)
                         .map(record -> {
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java
index 2905e4c225793..26847c4881c3f 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.xpack.core.common.notifications.AbstractAuditMessage;
 import org.elasticsearch.xpack.core.dataframe.DataFrameField;
+import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats;
 import org.elasticsearch.xpack.core.dataframe.transforms.DestConfig;
 import org.elasticsearch.xpack.core.dataframe.transforms.SourceConfig;
 
@@ -23,7 +24,7 @@
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
-import static org.elasticsearch.xpack.core.dataframe.DataFrameField.DATA_FRAME_TRANSFORM_AUDIT_ID_FIELD;
+import static org.elasticsearch.xpack.core.dataframe.DataFrameField.TRANSFORM_ID;
 
 public final class DataFrameInternalIndex {
 
@@ -49,6 +50,7 @@ public final class DataFrameInternalIndex {
 
     // data types
     public static final String DOUBLE = "double";
+    public static final String LONG = "long";
     public static final String KEYWORD = "keyword";
 
     public static IndexTemplateMetaData getIndexTemplateMetaData() throws IOException {
@@ -83,7 +85,7 @@ private static XContentBuilder auditMappings() throws IOException {
         addMetaInformation(builder);
         builder.field(DYNAMIC, "false");
         builder.startObject(PROPERTIES)
-            .startObject(DATA_FRAME_TRANSFORM_AUDIT_ID_FIELD)
+            .startObject(TRANSFORM_ID)
                 .field(TYPE, KEYWORD)
            .endObject()
            .startObject(AbstractAuditMessage.LEVEL.getPreferredName())
@@ -125,7 +127,8 @@ private static XContentBuilder mappings() throws IOException {
         builder.startObject(DataFrameField.INDEX_DOC_TYPE.getPreferredName()).field(TYPE, KEYWORD).endObject();
         // add the schema for transform configurations
         addDataFrameTransformsConfigMappings(builder);
-
+        // add the schema for transform stats
+        addDataFrameTransformsStatsMappings(builder);
         // end type
         builder.endObject();
         // end properties
@@ -135,6 +138,41 @@ private static XContentBuilder mappings() throws IOException {
         return builder;
     }
 
+
+    private static XContentBuilder addDataFrameTransformsStatsMappings(XContentBuilder builder) throws IOException {
+        return builder
+            .startObject(DataFrameIndexerTransformStats.NUM_PAGES.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.NUM_INPUT_DOCUMENTS.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.NUM_OUTPUT_DOCUMENTS.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.NUM_INVOCATIONS.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.INDEX_TIME_IN_MS.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.SEARCH_TIME_IN_MS.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.INDEX_TOTAL.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.SEARCH_TOTAL.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.SEARCH_FAILURES.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject()
+            .startObject(DataFrameIndexerTransformStats.INDEX_FAILURES.getPreferredName())
+                .field(TYPE, LONG)
+            .endObject();
+    }
+
     private static XContentBuilder addDataFrameTransformsConfigMappings(XContentBuilder builder) throws IOException {
         return builder
             .startObject(DataFrameField.ID.getPreferredName())
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
index 1392e95e79a5d..a19a0b65d85f1 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java
@@ -17,9 +17,13 @@
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.index.IndexAction;
 import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -29,18 +33,26 @@
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
+import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.reindex.DeleteByQueryAction;
 import org.elasticsearch.index.reindex.DeleteByQueryRequest;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher;
+import org.elasticsearch.xpack.core.action.util.PageParams;
 import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 import org.elasticsearch.xpack.core.dataframe.DataFrameMessages;
+import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats;
 import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpoint;
 import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 
 import static org.elasticsearch.xpack.core.ClientHelper.DATA_FRAME_ORIGIN;
@@ -172,6 +184,61 @@ public void getTransformConfiguration(String transformId, ActionListener<DataFrameTransformConfig> resultListener) {
+    public void expandTransformIds(String transformIdsExpression,
+                                   PageParams pageParams,
+                                   ActionListener<List<String>> foundIdsListener) {
+        String[] idTokens = ExpandedIdsMatcher.tokenizeExpression(transformIdsExpression);
+        QueryBuilder queryBuilder = buildQueryFromTokenizedIds(idTokens, DataFrameTransformConfig.NAME);
+
+        SearchRequest request = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME)
+            .addSort(DataFrameField.ID.getPreferredName(), SortOrder.ASC)
+            .setFrom(pageParams.getFrom())
+            .setSize(pageParams.getSize())
+            .setQuery(queryBuilder)
+            // We only care about the `id` field, small optimization
+            .setFetchSource(DataFrameField.ID.getPreferredName(), "")
+            .request();
+
+        final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, true);
+
+        executeAsyncWithOrigin(client.threadPool().getThreadContext(), DATA_FRAME_ORIGIN, request,
+            ActionListener.wrap(
+                searchResponse -> {
+                    List<String> ids = new ArrayList<>(searchResponse.getHits().getHits().length);
+                    for (SearchHit hit : searchResponse.getHits().getHits()) {
+                        BytesReference source = hit.getSourceRef();
+                        try (InputStream stream = source.streamInput();
+                             XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY,
+                                 LoggingDeprecationHandler.INSTANCE, stream)) {
+                            ids.add((String) parser.map().get(DataFrameField.ID.getPreferredName()));
+                        } catch (IOException e) {
+                            foundIdsListener.onFailure(new ElasticsearchParseException("failed to parse search hit for ids", e));
+                            return;
+                        }
+                    }
+                    requiredMatches.filterMatchedIds(ids);
+                    if (requiredMatches.hasUnmatchedIds()) {
+                        // some required Ids were not found
+                        foundIdsListener.onFailure(
+                            new ResourceNotFoundException(
+                                DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM,
+                                    requiredMatches.unmatchedIdsString())));
+                        return;
+                    }
+                    foundIdsListener.onResponse(ids);
+                },
+                foundIdsListener::onFailure
+            ), client::search);
+    }
+
     /**
      * This deletes the configuration and all other documents corresponding to the transform id (e.g. checkpoints).
      *
@@ -206,6 +273,58 @@ public void deleteTransform(String transformId, ActionListener<Boolean> listener) {
         }));
     }
 
+    public void putOrUpdateTransformStats(DataFrameIndexerTransformStats stats, ActionListener<Boolean> listener) {
+        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
+            XContentBuilder source = stats.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
+
+            IndexRequest indexRequest = new IndexRequest(DataFrameInternalIndex.INDEX_NAME)
+                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+                .id(DataFrameIndexerTransformStats.documentId(stats.getTransformId()))
+                .source(source);
+
+            executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap(
+                r -> listener.onResponse(true),
+                e -> listener.onFailure(new RuntimeException(
+                    DataFrameMessages.getMessage(DataFrameMessages.DATA_FRAME_FAILED_TO_PERSIST_STATS, stats.getTransformId()),
+                    e))
+            ));
+        } catch (IOException e) {
+            // not expected to happen but for the sake of completeness
+            listener.onFailure(new ElasticsearchParseException(
+                DataFrameMessages.getMessage(DataFrameMessages.DATA_FRAME_FAILED_TO_PERSIST_STATS, stats.getTransformId()),
+                e));
+        }
+    }
+
+    public void getTransformStats(String transformId, ActionListener<DataFrameIndexerTransformStats> resultListener) {
+        GetRequest getRequest = new GetRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameIndexerTransformStats.documentId(transformId));
+        executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(getResponse -> {
+
+            if (getResponse.isExists() == false) {
+                resultListener.onFailure(new ResourceNotFoundException(
+                    DataFrameMessages.getMessage(DataFrameMessages.DATA_FRAME_UNKNOWN_TRANSFORM_STATS, transformId)));
+                return;
+            }
+            BytesReference source = getResponse.getSourceAsBytesRef();
+            try (InputStream stream = source.streamInput();
+                 XContentParser parser = XContentFactory.xContent(XContentType.JSON)
+                     .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream)) {
+                resultListener.onResponse(DataFrameIndexerTransformStats.fromXContent(parser));
+            } catch (Exception e) {
+                logger.error(
+                    DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_PARSE_TRANSFORM_STATISTICS_CONFIGURATION, transformId), e);
+                resultListener.onFailure(e);
+            }
+        }, e -> {
+            if (e instanceof ResourceNotFoundException) {
+                resultListener.onFailure(new ResourceNotFoundException(
+                    DataFrameMessages.getMessage(DataFrameMessages.DATA_FRAME_UNKNOWN_TRANSFORM_STATS, transformId)));
+            } else {
+                resultListener.onFailure(e);
+            }
+        }));
+    }
+
     private void parseTransformLenientlyFromSource(BytesReference source, String transformId,
                                                    ActionListener<DataFrameTransformConfig> transformListener) {
         try (InputStream stream = source.streamInput();
@@ -229,4 +348,28 @@ private void parseCheckpointsLenientlyFromSource(BytesReference source, String transformId,
             transformListener.onFailure(e);
         }
     }
+
+    private QueryBuilder buildQueryFromTokenizedIds(String[] idTokens, String resourceName) {
+        BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery()
+            .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), resourceName));
+        if (Strings.isAllOrWildcard(idTokens) == false) {
+            List<String> terms = new ArrayList<>();
+            BoolQueryBuilder shouldQueries = new BoolQueryBuilder();
+            for (String token : idTokens) {
+                if (Regex.isSimpleMatchPattern(token)) {
+                    shouldQueries.should(QueryBuilders.wildcardQuery(DataFrameField.ID.getPreferredName(), token));
+                } else {
+                    terms.add(token);
+                }
+            }
+            if (terms.isEmpty() == false) {
+                shouldQueries.should(QueryBuilders.termsQuery(DataFrameField.ID.getPreferredName(), terms));
+            }
+
+            if (shouldQueries.should().isEmpty() == false) {
+                queryBuilder.filter(shouldQueries);
+            }
+        }
+        return QueryBuilders.constantScoreQuery(queryBuilder);
+    }
 }
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java
index 0609c2d499fc7..87cc13edbc329 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.xpack.core.action.util.PageParams;
 import org.elasticsearch.xpack.core.dataframe.DataFrameField;
 import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction;
 
@@ -27,6 +28,11 @@ public RestGetDataFrameTransformsStatsAction(Settings settings, RestController controller) {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
         String id = restRequest.param(DataFrameField.ID.getPreferredName());
         GetDataFrameTransformsStatsAction.Request request = new GetDataFrameTransformsStatsAction.Request(id);
+        if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) {
+            request.setPageParams(
+                new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM),
+                    restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));
+        }
         return channel -> client.execute(GetDataFrameTransformsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }
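Because the handler above only attaches `PageParams` when `from` or `size` is present, callers opt into paging explicitly. A client-side sketch (the parameter names follow `PageParams.FROM` and `PageParams.SIZE`; the endpoint prefix is an assumption):

----
Request request = new Request("GET", "_data_frame/transforms/_stats"); // assumed endpoint prefix
request.addParameter("from", "0");   // PageParams.FROM, falls back to PageParams.DEFAULT_FROM
request.addParameter("size", "100"); // PageParams.SIZE, falls back to PageParams.DEFAULT_SIZE
Response response = client().performRequest(request);
----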
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java
index bb07722ddeed0..090a9c9cfccc0 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java
@@ -39,8 +39,11 @@ public abstract class DataFrameIndexer extends AsyncTwoPhaseIndexer<Map<String, Object>, DataFrameIndexerTransformStats> {
 
-    public DataFrameIndexer(Executor executor, AtomicReference<IndexerState> initialState, Map<String, Object> initialPosition) {
-        super(executor, initialState, initialPosition, new DataFrameIndexerTransformStats());
+    public DataFrameIndexer(Executor executor,
+                            AtomicReference<IndexerState> initialState,
+                            Map<String, Object> initialPosition,
+                            DataFrameIndexerTransformStats jobStats) {
+        super(executor, initialState, initialPosition, jobStats);
     }
 
     protected abstract DataFrameTransformConfig getConfig();
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java
index d53354db2aa70..e3c27fd21fe03 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java
@@ -8,6 +8,8 @@
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.persistent.AllocatedPersistentTask;
@@ -60,18 +62,33 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable DataFrameTransform params, PersistentTaskState state) {
         DataFrameTransformTask buildTask = (DataFrameTransformTask) task;
         SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job(
                 DataFrameTransformTask.SCHEDULE_NAME + "_" + params.getId(), next());
-
         DataFrameTransformState transformState = (DataFrameTransformState) state;
+
         if (transformState != null && transformState.getTaskState() == DataFrameTransformTaskState.FAILED) {
             logger.warn("Tried to start failed transform [" + params.getId() + "] failure reason: " + transformState.getReason());
             return;
         }
 
+        transformsConfigManager.getTransformStats(params.getId(), ActionListener.wrap(
+            stats -> {
+                // Initialize with the previously recorded stats
+                buildTask.initializePreviousStats(stats);
+                scheduleTask(buildTask, schedulerJob, params.getId());
+            },
+            error -> {
+                if (error instanceof ResourceNotFoundException == false) {
+                    logger.error("Unable to load previously persisted statistics for transform [" + params.getId() + "]", error);
+                }
+                scheduleTask(buildTask, schedulerJob, params.getId());
+            }
+        ));
+    }
+
+    private void scheduleTask(DataFrameTransformTask buildTask, SchedulerEngine.Job schedulerJob, String id) {
         // Note that while the task is added to the scheduler here, the internal state will prevent
         // it from doing any work until the task is "started" via the StartTransform api
         schedulerEngine.register(buildTask);
         schedulerEngine.add(schedulerJob);
-        logger.info("Data frame transform [" + params.getId() + "] created.");
+        logger.info("Data frame transform [" + id + "] created.");
     }
 
     static SchedulerEngine.Schedule next() {
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java
index b8bc2870307aa..23884afec3348 100644
--- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java
+++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java
@@ -64,6 +64,7 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements SchedulerEngine.Listener {
     private final ThreadPool threadPool;
     private final DataFrameIndexer indexer;
     private final Auditor auditor;
+    private final DataFrameIndexerTransformStats previousStats;
 
     private final AtomicReference<DataFrameTransformTaskState> taskState;
     private final AtomicReference<String> stateReason;
@@ -110,6 +111,7 @@ public DataFrameTransformTask(long id, String type, String action, TaskId parentTask,
         this.indexer = new ClientDataFrameIndexer(transform.getId(), transformsConfigManager,
             transformsCheckpointService, new AtomicReference<>(initialState), initialPosition, client, auditor);
         this.generation = new AtomicReference<>(initialGeneration);
+        this.previousStats = new DataFrameIndexerTransformStats(transform.getId());
         this.taskState = new AtomicReference<>(initialTaskState);
         this.stateReason = new AtomicReference<>(initialReason);
         this.failureCount = new AtomicInteger(0);
@@ -131,8 +133,12 @@ public DataFrameTransformState getState() {
         return new DataFrameTransformState(taskState.get(), indexer.getState(), indexer.getPosition(), generation.get(), stateReason.get());
     }
 
+    void initializePreviousStats(DataFrameIndexerTransformStats stats) {
+        previousStats.merge(stats);
+    }
+
     public DataFrameIndexerTransformStats getStats() {
-        return indexer.getStats();
+        return new DataFrameIndexerTransformStats(previousStats).merge(indexer.getStats());
     }
 
     public long getGeneration() {
@@ -297,6 +303,7 @@ protected class ClientDataFrameIndexer extends DataFrameIndexer {
         private final DataFrameTransformsCheckpointService transformsCheckpointService;
         private final String transformId;
         private final Auditor auditor;
+        private volatile DataFrameIndexerTransformStats previouslyPersistedStats = null;
         // Keeps track of the last exception that was written to our audit, keeps us from spamming the audit index
         private volatile String lastAuditedExceptionMessage = null;
         private Map<String, String> fieldMappings = null;
@@ -307,7 +314,8 @@ public ClientDataFrameIndexer(String transformId, DataFrameTransformsConfigManager transformsConfigManager,
                                       DataFrameTransformsCheckpointService transformsCheckpointService,
                                       AtomicReference<IndexerState> initialState, Map<String, Object> initialPosition, Client client,
                                       Auditor auditor) {
-            super(threadPool.executor(ThreadPool.Names.GENERIC), initialState, initialPosition);
+            super(threadPool.executor(ThreadPool.Names.GENERIC), initialState, initialPosition,
+                new DataFrameIndexerTransformStats(transformId));
             this.transformId = transformId;
             this.transformsConfigManager = transformsConfigManager;
             this.transformsCheckpointService = transformsCheckpointService;
@@ -422,7 +430,39 @@ protected void doSaveState(IndexerState indexerState, Map<String, Object> position, Runnable next) {
                 generation.get(), stateReason.get());
             logger.info("Updating persistent state of transform [" + transform.getId() + "] to [" + state.toString() + "]");
-            persistStateToClusterState(state, ActionListener.wrap(t -> next.run(), e -> next.run()));
+
+            // Persisting stats when we call `doSaveState` should be ok, as we only call it on a state transition and
+            // only every so often during the bulk indexing calls. See AsyncTwoPhaseIndexer#onBulkResponse for the current periodicity.
+            ActionListener<PersistentTasksCustomMetaData.PersistentTask<?>> updateClusterStateListener = ActionListener.wrap(
+                task -> {
+                    // Make a copy of the previousStats so that they are not constantly updated when `merge` is called
+                    DataFrameIndexerTransformStats tempStats = new DataFrameIndexerTransformStats(previousStats).merge(getStats());
+
+                    // Only persist the stats if something has actually changed
+                    if (previouslyPersistedStats == null || previouslyPersistedStats.equals(tempStats) == false) {
+                        transformsConfigManager.putOrUpdateTransformStats(tempStats,
+                            ActionListener.wrap(
+                                r -> {
+                                    previouslyPersistedStats = tempStats;
+                                    next.run();
+                                },
+                                statsExc -> {
+                                    logger.error("Updating stats of transform [" + transform.getId() + "] failed", statsExc);
+                                    next.run();
+                                }
+                            ));
+                    } else {
+                        // The stats that we previously wrote to the doc are the same as they are now, no need to update
+                        next.run();
+                    }
+                },
+                exc -> {
+                    logger.error("Updating persistent state of transform [" + transform.getId() + "] failed", exc);
+                    next.run();
+                }
+            );
+
+            persistStateToClusterState(state, updateClusterStateListener);
         }
 
         @Override
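The split introduced above between `previousStats` (restored from the stats document) and the live indexer counters means the externally visible statistics are always the sum of the two. A small sketch of that contract, where `persistedStats` and `indexer` are stand-ins:

----
// Restored once at task start from the persisted stats document:
DataFrameIndexerTransformStats previous = new DataFrameIndexerTransformStats("my-transform");
previous.merge(persistedStats); // e.g. counters accumulated before a node restart

// Live counters start from zero in the new indexer, so getStats() copies first and then merges,
// leaving previousStats itself unchanged between calls:
DataFrameIndexerTransformStats visible = new DataFrameIndexerTransformStats(previous).merge(indexer.getStats());
----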
diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java
index bea7e5b0148e0..f70cbb8b277c2 100644
--- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java
+++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java
@@ -6,8 +6,10 @@
 
 package org.elasticsearch.xpack.dataframe;
 
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -15,25 +17,24 @@
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.search.aggregations.Aggregation;
+import org.elasticsearch.search.aggregations.Aggregations;
+import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.core.XPackFeatureSet;
 import org.elasticsearch.xpack.core.XPackFeatureSet.Usage;
-import org.elasticsearch.xpack.core.dataframe.DataFrameFeatureSetUsage;
 import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats;
-import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig;
-import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests;
-import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats;
-import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStatsTests;
 import org.junit.Before;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutionException;
 
-import static java.lang.Math.toIntExact;
+import static org.elasticsearch.xpack.dataframe.DataFrameFeatureSet.PROVIDED_STATS;
+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.core.Is.is;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -47,7 +48,10 @@ public void init() {
     }
 
     public void testAvailable() {
-        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY, mock(Client.class), licenseState);
+        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY,
+            mock(ClusterService.class),
+            mock(Client.class),
+            licenseState);
         boolean available = randomBoolean();
         when(licenseState.isDataFrameAllowed()).thenReturn(available);
         assertThat(featureSet.available(), is(available));
@@ -57,89 +61,67 @@ public void testEnabledSetting() {
         boolean enabled = randomBoolean();
         Settings.Builder settings = Settings.builder();
         settings.put("xpack.data_frame.enabled", enabled);
-        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(settings.build(), mock(Client.class), licenseState);
+        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(settings.build(),
+            mock(ClusterService.class),
+            mock(Client.class),
+            licenseState);
         assertThat(featureSet.enabled(), is(enabled));
     }
 
     public void testEnabledDefault() {
-        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY, mock(Client.class), licenseState);
+        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY,
+            mock(ClusterService.class),
+            mock(Client.class),
+            licenseState);
         assertTrue(featureSet.enabled());
     }
 
-    public void testUsage() throws IOException {
-        List<DataFrameTransformStateAndStats> transformsStateAndStats = new ArrayList<>();
-        int count = randomIntBetween(0, 10);
-        int uniqueId = 0;
-        for (int i = 0; i < count; ++i) {
-            transformsStateAndStats.add(
-                DataFrameTransformStateAndStatsTests.randomDataFrameTransformStateAndStats("df-" + Integer.toString(uniqueId++)));
+    public void testParseSearchAggs() {
+        Aggregations emptyAggs = new Aggregations(Collections.emptyList());
+        SearchResponse withEmptyAggs = mock(SearchResponse.class);
+        when(withEmptyAggs.getAggregations()).thenReturn(emptyAggs);
+
+        assertThat(DataFrameFeatureSet.parseSearchAggs(withEmptyAggs), equalTo(DataFrameIndexerTransformStats.withDefaultTransformId()));
+
+        DataFrameIndexerTransformStats expectedStats = new DataFrameIndexerTransformStats("_all",
+            1,  // numPages
+            2,  // numInputDocuments
+            3,  // numOutputDocuments
+            4,  // numInvocations
+            5,  // indexTime
+            6,  // searchTime
+            7,  // indexTotal
+            8,  // searchTotal
+            9,  // indexFailures
+            10); // searchFailures
+
+        int currentStat = 1;
+        List<Aggregation> aggs = new ArrayList<>(PROVIDED_STATS.length);
+        for (String statName : PROVIDED_STATS) {
+            aggs.add(buildAgg(statName, (double) currentStat++));
         }
+        Aggregations aggregations = new Aggregations(aggs);
+        SearchResponse withAggs = mock(SearchResponse.class);
+        when(withAggs.getAggregations()).thenReturn(aggregations);
 
-        count = randomIntBetween(0, 10);
-        List<DataFrameTransformConfig> transformConfigWithoutTasks = new ArrayList<>();
-        for (int i = 0; i < count; ++i) {
-            transformConfigWithoutTasks.add(
-                DataFrameTransformConfigTests.randomDataFrameTransformConfig("df-" + Integer.toString(uniqueId++)));
-        }
-
-        List<DataFrameTransformConfig> transformConfigWithTasks =
-            new ArrayList<>(transformsStateAndStats.size() + transformConfigWithoutTasks.size());
-
-        transformsStateAndStats.forEach(stats ->
-            transformConfigWithTasks.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig(stats.getId())));
-        transformConfigWithoutTasks.forEach(withoutTask ->
-            transformsStateAndStats.add(DataFrameTransformStateAndStats.initialStateAndStats(withoutTask.getId())));
-
-        boolean enabled = randomBoolean();
-        boolean available = randomBoolean();
-        DataFrameFeatureSetUsage usage = DataFrameFeatureSet.createUsage(available,
-            enabled,
-            transformsStateAndStats);
-
-        assertEquals(enabled, usage.enabled());
-        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
-            usage.toXContent(builder, ToXContent.EMPTY_PARAMS);
+        assertThat(DataFrameFeatureSet.parseSearchAggs(withAggs), equalTo(expectedStats));
+    }
 
-            XContentParser parser = createParser(builder);
-            Map<String, Object> usageAsMap = parser.map();
-            assertEquals(available, (boolean) XContentMapValues.extractValue("available", usageAsMap));
-
-            if (transformsStateAndStats.isEmpty() && transformConfigWithoutTasks.isEmpty()) {
-                // no transforms, no stats
-                assertEquals(null, XContentMapValues.extractValue("transforms", usageAsMap));
-                assertEquals(null, XContentMapValues.extractValue("stats", usageAsMap));
-            } else {
-                assertEquals(transformsStateAndStats.size(), XContentMapValues.extractValue("transforms._all", usageAsMap));
-
-                Map<String, Integer> stateCounts = new HashMap<>();
-                transformsStateAndStats.stream()
-                    .map(x -> x.getTransformState().getIndexerState().value())
-                    .forEach(x -> stateCounts.merge(x, 1, Integer::sum));
-                stateCounts.forEach((k, v) -> assertEquals(v, XContentMapValues.extractValue("transforms." + k, usageAsMap)));
-
-                // use default constructed stats object for assertions if transformsStateAndStats is empty
-                DataFrameIndexerTransformStats combinedStats = new DataFrameIndexerTransformStats();
-                if (transformsStateAndStats.isEmpty() == false) {
-                    combinedStats = transformsStateAndStats.stream().map(x -> x.getTransformStats()).reduce((l, r) -> l.merge(r)).get();
-                }
-
-                assertEquals(toIntExact(combinedStats.getIndexFailures()),
-                    XContentMapValues.extractValue("stats.index_failures", usageAsMap));
-                assertEquals(toIntExact(combinedStats.getIndexTotal()),
-                    XContentMapValues.extractValue("stats.index_total", usageAsMap));
-                assertEquals(toIntExact(combinedStats.getSearchTime()),
-                    XContentMapValues.extractValue("stats.search_time_in_ms", usageAsMap));
-                assertEquals(toIntExact(combinedStats.getNumDocuments()),
-                    XContentMapValues.extractValue("stats.documents_processed", usageAsMap));
-            }
-        }
+    private static Aggregation buildAgg(String name, double value) {
+        NumericMetricsAggregation.SingleValue agg = mock(NumericMetricsAggregation.SingleValue.class);
+        when(agg.getName()).thenReturn(name);
+        when(agg.value()).thenReturn(value);
+        return agg;
     }
 
     public void testUsageDisabled() throws IOException, InterruptedException, ExecutionException {
         when(licenseState.isDataFrameAllowed()).thenReturn(true);
         Settings.Builder settings = Settings.builder();
         settings.put("xpack.data_frame.enabled", false);
-        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(settings.build(), mock(Client.class), licenseState);
+        DataFrameFeatureSet featureSet = new DataFrameFeatureSet(settings.build(),
+            mock(ClusterService.class),
+            mock(Client.class),
+            licenseState);
         PlainActionFuture<Usage> future = new PlainActionFuture<>();
         featureSet.usage(future);
         XPackFeatureSet.Usage usage = future.get();
diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointServiceTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointServiceTests.java
index 0868315165cdc..9cc2769e7d149 100644
--- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointServiceTests.java
+++ 
b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/checkpoint/DataFrameTransformsCheckpointServiceTests.java @@ -82,7 +82,6 @@ public void testExtractIndexCheckpointsLostPrimaries() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40368") public void testExtractIndexCheckpointsInconsistentGlobalCheckpoints() { Map expectedCheckpoints = new HashMap<>(); Set indices = randomUserIndices(); @@ -161,7 +160,7 @@ private static ShardStats[] createRandomShardStats(Map expectedC long globalCheckpoint = randomBoolean() ? localCheckpoint : randomLongBetween(0L, 100000000L); long maxSeqNo = Math.max(localCheckpoint, globalCheckpoint); - SeqNoStats seqNoStats = new SeqNoStats(maxSeqNo, localCheckpoint, globalCheckpoint); + final SeqNoStats validSeqNoStats = new SeqNoStats(maxSeqNo, localCheckpoint, globalCheckpoint); checkpoints.add(globalCheckpoint); for (int replica = 0; replica < numShardCopies; replica++) { @@ -194,10 +193,16 @@ private static ShardStats[] createRandomShardStats(Map expectedC if (inconsistentReplica == replica) { // overwrite - seqNoStats = new SeqNoStats(maxSeqNo, localCheckpoint, globalCheckpoint + randomLongBetween(10L, 100L)); + SeqNoStats invalidSeqNoStats = + new SeqNoStats(maxSeqNo, localCheckpoint, globalCheckpoint + randomLongBetween(10L, 100L)); + shardStats.add( + new ShardStats(shardRouting, + new ShardPath(false, path, path, shardId), stats, null, invalidSeqNoStats, null)); + } else { + shardStats.add( + new ShardStats(shardRouting, + new ShardPath(false, path, path, shardId), stats, null, validSeqNoStats, null)); } - - shardStats.add(new ShardStats(shardRouting, new ShardPath(false, path, path, shardId), stats, null, seqNoStats, null)); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java index f9c5d405fe665..8fe384553bcec 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpoint; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointTests; @@ -15,6 +16,13 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + public class DataFrameTransformsConfigManagerTests extends DataFrameSingleNodeTestCase { private DataFrameTransformsConfigManager transformsConfigManager; @@ -128,4 +136,84 @@ public void testCreateReadDeleteCheckPoint() throws InterruptedException { assertAsync(listener -> transformsConfigManager.getTransformCheckpoint(checkpoint.getTransformId(), checkpoint.getCheckpoint(), listener), DataFrameTransformCheckpoint.EMPTY, null, null); } + + public void testExpandIds() throws 
Exception { + DataFrameTransformConfig transformConfig1 = DataFrameTransformConfigTests.randomDataFrameTransformConfig("transform1_expand"); + DataFrameTransformConfig transformConfig2 = DataFrameTransformConfigTests.randomDataFrameTransformConfig("transform2_expand"); + DataFrameTransformConfig transformConfig3 = DataFrameTransformConfigTests.randomDataFrameTransformConfig("transform3_expand"); + + // create transform + assertAsync(listener -> transformsConfigManager.putTransformConfiguration(transformConfig1, listener), true, null, null); + assertAsync(listener -> transformsConfigManager.putTransformConfiguration(transformConfig2, listener), true, null, null); + assertAsync(listener -> transformsConfigManager.putTransformConfiguration(transformConfig3, listener), true, null, null); + + + // expand 1 id + assertAsync(listener -> + transformsConfigManager.expandTransformIds(transformConfig1.getId(), + PageParams.defaultParams(), + listener), + Collections.singletonList("transform1_expand"), + null, + null); + + // expand 2 ids explicitly + assertAsync(listener -> + transformsConfigManager.expandTransformIds("transform1_expand,transform2_expand", + PageParams.defaultParams(), + listener), + Arrays.asList("transform1_expand", "transform2_expand"), + null, + null); + + // expand 3 ids wildcard and explicit + assertAsync(listener -> + transformsConfigManager.expandTransformIds("transform1*,transform2_expand,transform3_expand", + PageParams.defaultParams(), + listener), + Arrays.asList("transform1_expand", "transform2_expand", "transform3_expand"), + null, + null); + + // expand 3 ids _all + assertAsync(listener -> + transformsConfigManager.expandTransformIds("_all", + PageParams.defaultParams(), + listener), + Arrays.asList("transform1_expand", "transform2_expand", "transform3_expand"), + null, + null); + + // expand 1 id _all with pagination + assertAsync(listener -> + transformsConfigManager.expandTransformIds("_all", + new PageParams(0, 1), + listener), + Collections.singletonList("transform1_expand"), + null, + null); + + // expand 2 later ids _all with pagination + assertAsync(listener -> + transformsConfigManager.expandTransformIds("_all", + new PageParams(1, 2), + listener), + Arrays.asList("transform2_expand", "transform3_expand"), + null, + null); + + // expand 1 id explicitly that does not exist + assertAsync(listener -> + transformsConfigManager.expandTransformIds("unknown,unknown2", + new PageParams(1, 2), + listener), + (List)null, + null, + e -> { + assertThat(e, instanceOf(ResourceNotFoundException.class)); + assertThat(e.getMessage(), + equalTo(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, "unknown,unknown2"))); + }); + + } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java index eedf6264f348b..c2c22dc6ffad4 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java @@ -501,7 +501,7 @@ aggTypedName, asMap( "value", 122.55), DOC_COUNT, 44) )); - DataFrameIndexerTransformStats stats = new DataFrameIndexerTransformStats(); + DataFrameIndexerTransformStats stats = DataFrameIndexerTransformStats.withDefaultTransformId(); Map fieldTypeMap 
= asStringMap( aggName, "double", @@ -534,7 +534,7 @@ aggTypedName, asMap( private void executeTest(GroupConfig groups, Collection aggregationBuilders, Map input, Map fieldTypeMap, List> expected, long expectedDocCounts) throws IOException { - DataFrameIndexerTransformStats stats = new DataFrameIndexerTransformStats(); + DataFrameIndexerTransformStats stats = DataFrameIndexerTransformStats.withDefaultTransformId(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.map(input); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java index 565a9723c8c6c..511d0e5be1ab9 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java @@ -385,14 +385,12 @@ public void testRunStateChangePolicyWithAsyncActionNextStep() throws Exception { ClusterState before = clusterService.state(); CountDownLatch latch = new CountDownLatch(1); step.setLatch(latch); + CountDownLatch asyncLatch = new CountDownLatch(1); + nextStep.setLatch(asyncLatch); runner.runPolicyAfterStateChange(policyName, indexMetaData); // Wait for the cluster state action step awaitLatch(latch, 5, TimeUnit.SECONDS); - - CountDownLatch asyncLatch = new CountDownLatch(1); - nextStep.setLatch(asyncLatch); - // Wait for the async action step awaitLatch(asyncLatch, 5, TimeUnit.SECONDS); ClusterState after = clusterService.state(); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 610275705eef8..e85a92c061366 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -56,17 +56,14 @@ import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps; import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction; -import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.rollup.Rollup; import org.elasticsearch.xpack.rollup.RollupJobIdentifierUtils; import org.elasticsearch.xpack.rollup.RollupRequestTranslator; import org.elasticsearch.xpack.rollup.RollupResponseTranslator; -import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Objects; @@ -286,11 +283,8 @@ static QueryBuilder rewriteQuery(QueryBuilder builder, Set jobCap } else if (builder.getWriteableName().equals(RangeQueryBuilder.NAME)) { RangeQueryBuilder range = (RangeQueryBuilder) builder; String fieldName = range.fieldName(); - // Many range queries don't include the timezone because the default is UTC, but the query - // builder will return null so we need to set it here - String timeZone = range.timeZone() == null ? 
DateTimeZone.UTC.toString() : range.timeZone(); - String rewrittenFieldName = rewriteFieldName(jobCaps, RangeQueryBuilder.NAME, fieldName, timeZone); + String rewrittenFieldName = rewriteFieldName(jobCaps, RangeQueryBuilder.NAME, fieldName); RangeQueryBuilder rewritten = new RangeQueryBuilder(rewrittenFieldName) .from(range.from()) .to(range.to()) @@ -306,12 +300,12 @@ static QueryBuilder rewriteQuery(QueryBuilder builder, Set jobCap } else if (builder.getWriteableName().equals(TermQueryBuilder.NAME)) { TermQueryBuilder term = (TermQueryBuilder) builder; String fieldName = term.fieldName(); - String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName, null); + String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName); return new TermQueryBuilder(rewrittenFieldName, term.value()); } else if (builder.getWriteableName().equals(TermsQueryBuilder.NAME)) { TermsQueryBuilder terms = (TermsQueryBuilder) builder; String fieldName = terms.fieldName(); - String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName, null); + String rewrittenFieldName = rewriteFieldName(jobCaps, TermQueryBuilder.NAME, fieldName); return new TermsQueryBuilder(rewrittenFieldName, terms.values()); } else if (builder.getWriteableName().equals(MatchAllQueryBuilder.NAME)) { // no-op @@ -321,11 +315,7 @@ static QueryBuilder rewriteQuery(QueryBuilder builder, Set jobCap } } - private static String rewriteFieldName(Set jobCaps, - String builderName, - String fieldName, - String timeZone) { - List incompatibleTimeZones = timeZone == null ? Collections.emptyList() : new ArrayList<>(); + private static String rewriteFieldName(Set jobCaps, String builderName, String fieldName) { List rewrittenFieldNames = jobCaps.stream() // We only care about job caps that have the query's target field .filter(caps -> caps.getFieldCaps().keySet().contains(fieldName)) @@ -335,17 +325,7 @@ private static String rewriteFieldName(Set jobCaps, // For now, we only allow filtering on grouping fields .filter(agg -> { String type = (String)agg.get(RollupField.AGG); - - // If the cap is for a date_histo, and the query is a range, the timezones need to match - if (type.equals(DateHistogramAggregationBuilder.NAME) && timeZone != null) { - boolean matchingTZ = ((String)agg.get(DateHistogramGroupConfig.TIME_ZONE)) - .equalsIgnoreCase(timeZone); - if (matchingTZ == false) { - incompatibleTimeZones.add((String)agg.get(DateHistogramGroupConfig.TIME_ZONE)); - } - return matchingTZ; - } - // Otherwise just make sure it's one of the three groups + // make sure it's one of the three groups return type.equals(TermsAggregationBuilder.NAME) || type.equals(DateHistogramAggregationBuilder.NAME) || type.equals(HistogramAggregationBuilder.NAME); @@ -363,14 +343,8 @@ private static String rewriteFieldName(Set jobCaps, .distinct() .collect(ArrayList::new, List::addAll, List::addAll); if (rewrittenFieldNames.isEmpty()) { - if (incompatibleTimeZones.isEmpty()) { - throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builderName + throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builderName + "] query is not available in selected rollup indices, cannot query."); - } else { - throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builderName - + "] query was found in rollup indices, but requested timezone is not compatible. 
Options include: " - + incompatibleTimeZones); - } } else if (rewrittenFieldNames.size() > 1) { throw new IllegalArgumentException("Ambiguous field name resolution when mapping to rolled fields. Field name [" + fieldName + "] was mapped to: [" + Strings.collectionToDelimitedString(rewrittenFieldNames, ",") + "]."); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index 0032b5a88a563..5a851d17e5eaf 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -140,16 +140,15 @@ public void testRangeNullTimeZone() { assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp")); } - public void testRangeWrongTZ() { + public void testRangeDifferentTZ() { final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "UTC")); final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); - Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("CET"), caps)); - assertThat(e.getMessage(), equalTo("Field [foo] in [range] query was found in rollup indices, but requested timezone is not " + - "compatible. Options include: [UTC]")); + QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("CET"), caps); + assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); + assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp")); } public void testTermQuery() { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java index 2e5832d0834e7..ce7e54a3cb29d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java @@ -150,10 +150,18 @@ public void usage(ActionListener listener) { } static Map sslUsage(Settings settings) { - Map map = new HashMap<>(2); - map.put("http", singletonMap("enabled", HTTP_SSL_ENABLED.get(settings))); - map.put("transport", singletonMap("enabled", TRANSPORT_SSL_ENABLED.get(settings))); - return map; + // If security has been explicitly disabled in the settings, then SSL is also explicitly disabled, and we don't want to report + // these http/transport settings as they would be misleading (they could report `true` even though they were ignored) + // But, if security has not been explicitly configured, but has defaulted to off due to the current license type, + // then these SSL settings are still respected (that is SSL might be enabled, while the rest of security is disabled). 
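To make the behavior described in that comment concrete, here is a toy model of the gate implemented in the lines that follow (illustrative only, not the real `sslUsage(Settings)` signature): the SSL flags are reported only while the `xpack.security.enabled` setting is true, which is also its default. Disabling security via the license type does not change the setting itself, so SSL usage is still reported in that case.

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

final class SslUsageSketch {
    // Toy stand-in for the usage gate; the booleans replace real Setting lookups.
    static Map<String, Object> sslUsage(boolean securityEnabledSetting, boolean httpSsl, boolean transportSsl) {
        if (securityEnabledSetting == false) {
            // The SSL settings were ignored, so reporting their values would be misleading.
            return Collections.emptyMap();
        }
        Map<String, Object> map = new LinkedHashMap<>(2);
        map.put("http", Collections.singletonMap("enabled", httpSsl));
        map.put("transport", Collections.singletonMap("enabled", transportSsl));
        return map;
    }

    public static void main(String[] args) {
        // Security on (explicitly, or by default under a license-based disable): flags reported as configured.
        System.out.println(sslUsage(true, true, false));
        // Security explicitly disabled: nothing is reported, rather than a misleading "false".
        System.out.println(sslUsage(false, true, false));
    }
}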
+ if (XPackSettings.SECURITY_ENABLED.get(settings)) { + Map map = new HashMap<>(2); + map.put("http", singletonMap("enabled", HTTP_SSL_ENABLED.get(settings))); + map.put("transport", singletonMap("enabled", TRANSPORT_SSL_ENABLED.get(settings))); + return map; + } else { + return Collections.emptyMap(); + } } static Map tokenServiceUsage(Settings settings) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java index 146dc78698eca..2fc2ea8865d91 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.junit.Before; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -126,29 +127,10 @@ public void testUsage() throws Exception { final boolean rolesStoreEnabled = randomBoolean(); - doAnswer(invocationOnMock -> { - ActionListener> listener = (ActionListener>) invocationOnMock.getArguments()[0]; - if (rolesStoreEnabled) { - listener.onResponse(Collections.singletonMap("count", 1)); - } else { - listener.onResponse(Collections.emptyMap()); - } - return Void.TYPE; - }).when(rolesStore).usageStats(any(ActionListener.class)); + configureRoleStoreUsage(rolesStoreEnabled); final boolean roleMappingStoreEnabled = randomBoolean(); - doAnswer(invocationOnMock -> { - ActionListener> listener = (ActionListener) invocationOnMock.getArguments()[0]; - if (roleMappingStoreEnabled) { - final Map map = new HashMap<>(); - map.put("size", 12L); - map.put("enabled", 10L); - listener.onResponse(map); - } else { - listener.onResponse(Collections.emptyMap()); - } - return Void.TYPE; - }).when(roleMappingStore).usageStats(any(ActionListener.class)); + configureRoleMappingStoreUsage(roleMappingStoreEnabled); Map realmsUsageStats = new HashMap<>(); for (int i = 0; i < 5; i++) { @@ -158,11 +140,7 @@ public void testUsage() throws Exception { realmUsage.put("key2", Arrays.asList(i)); realmUsage.put("key3", Arrays.asList(i % 2 == 0)); } - doAnswer(invocationOnMock -> { - ActionListener> listener = (ActionListener) invocationOnMock.getArguments()[0]; - listener.onResponse(realmsUsageStats); - return Void.TYPE; - }).when(realms).usageStats(any(ActionListener.class)); + configureRealmsUsage(realmsUsageStats); final boolean anonymousEnabled = randomBoolean(); if (anonymousEnabled) { @@ -182,11 +160,7 @@ public void testUsage() throws Exception { assertThat(usage.name(), is(XPackField.SECURITY)); assertThat(usage.enabled(), is(enabled)); assertThat(usage.available(), is(authcAuthzAvailable)); - XContentSource source; - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - usage.toXContent(builder, ToXContent.EMPTY_PARAMS); - source = new XContentSource(builder); - } + XContentSource source = getXContentSource(usage); if (enabled) { if (authcAuthzAvailable) { @@ -251,4 +225,101 @@ public void testUsage() throws Exception { } } } + + public void testUsageOnTrialLicenseWithSecurityDisabledByDefault() throws Exception { + when(licenseState.isSecurityAvailable()).thenReturn(true); + when(licenseState.isSecurityDisabledByTrialLicense()).thenReturn(true); + + Settings.Builder settings = Settings.builder().put(this.settings); + + final boolean httpSSLEnabled 
= randomBoolean(); + settings.put("xpack.security.http.ssl.enabled", httpSSLEnabled); + final boolean transportSSLEnabled = randomBoolean(); + settings.put("xpack.security.transport.ssl.enabled", transportSSLEnabled); + + final boolean auditingEnabled = randomBoolean(); + settings.put(XPackSettings.AUDIT_ENABLED.getKey(), auditingEnabled); + + final boolean rolesStoreEnabled = randomBoolean(); + configureRoleStoreUsage(rolesStoreEnabled); + + final boolean roleMappingStoreEnabled = randomBoolean(); + configureRoleMappingStoreUsage(roleMappingStoreEnabled); + + configureRealmsUsage(Collections.emptyMap()); + + SecurityFeatureSet featureSet = new SecurityFeatureSet(settings.build(), licenseState, + realms, rolesStore, roleMappingStore, ipFilter); + PlainActionFuture future = new PlainActionFuture<>(); + featureSet.usage(future); + XPackFeatureSet.Usage securityUsage = future.get(); + BytesStreamOutput out = new BytesStreamOutput(); + securityUsage.writeTo(out); + XPackFeatureSet.Usage serializedUsage = new SecurityFeatureSetUsage(out.bytes().streamInput()); + for (XPackFeatureSet.Usage usage : Arrays.asList(securityUsage, serializedUsage)) { + assertThat(usage, is(notNullValue())); + assertThat(usage.name(), is(XPackField.SECURITY)); + assertThat(usage.enabled(), is(false)); + assertThat(usage.available(), is(true)); + XContentSource source = getXContentSource(usage); + + // check SSL : This is permitted even though security has been dynamically disabled by the trial license. + assertThat(source.getValue("ssl"), is(notNullValue())); + assertThat(source.getValue("ssl.http.enabled"), is(httpSSLEnabled)); + assertThat(source.getValue("ssl.transport.enabled"), is(transportSSLEnabled)); + + // everything else is missing because security is disabled + assertThat(source.getValue("realms"), is(nullValue())); + assertThat(source.getValue("token_service"), is(nullValue())); + assertThat(source.getValue("api_key_service"), is(nullValue())); + assertThat(source.getValue("audit"), is(nullValue())); + assertThat(source.getValue("anonymous"), is(nullValue())); + assertThat(source.getValue("ipfilter"), is(nullValue())); + assertThat(source.getValue("roles"), is(nullValue())); + } + } + + private XContentSource getXContentSource(XPackFeatureSet.Usage usage) throws IOException { + XContentSource source; + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + usage.toXContent(builder, ToXContent.EMPTY_PARAMS); + source = new XContentSource(builder); + } + return source; + } + + private void configureRealmsUsage(Map realmsUsageStats) { + doAnswer(invocationOnMock -> { + ActionListener> listener = (ActionListener) invocationOnMock.getArguments()[0]; + listener.onResponse(realmsUsageStats); + return Void.TYPE; + }).when(realms).usageStats(any(ActionListener.class)); + } + + private void configureRoleStoreUsage(boolean rolesStoreEnabled) { + doAnswer(invocationOnMock -> { + ActionListener> listener = (ActionListener>) invocationOnMock.getArguments()[0]; + if (rolesStoreEnabled) { + listener.onResponse(Collections.singletonMap("count", 1)); + } else { + listener.onResponse(Collections.emptyMap()); + } + return Void.TYPE; + }).when(rolesStore).usageStats(any(ActionListener.class)); + } + + private void configureRoleMappingStoreUsage(boolean roleMappingStoreEnabled) { + doAnswer(invocationOnMock -> { + ActionListener> listener = (ActionListener) invocationOnMock.getArguments()[0]; + if (roleMappingStoreEnabled) { + final Map map = new HashMap<>(); + map.put("size", 12L); + map.put("enabled", 10L); + 
listener.onResponse(map); + } else { + listener.onResponse(Collections.emptyMap()); + } + return Void.TYPE; + }).when(roleMappingStore).usageStats(any(ActionListener.class)); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java index 276d8a333f796..57db60051194e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java @@ -251,7 +251,7 @@ public void testToXContentWithTemplates() throws Exception { public void testSerialization() throws Exception { final ExpressionRoleMapping original = randomRoleMapping(true); - final Version version = VersionUtils.randomVersionBetween(random(), Version.V_8_0_0, null); + final Version version = VersionUtils.randomVersionBetween(random(), Version.V_7_1_0, null); BytesStreamOutput output = new BytesStreamOutput(); output.setVersion(version); original.writeTo(output); diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java index 6d6231bf4303f..52aff352ac182 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java @@ -29,6 +29,7 @@ public enum EsType implements SQLType { NESTED( Types.STRUCT), BINARY( Types.VARBINARY), DATE( Types.DATE), + TIME( Types.TIME), DATETIME( Types.TIMESTAMP), IP( Types.VARCHAR), INTERVAL_YEAR( ExtraTypes.INTERVAL_YEAR), diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcDateUtils.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcDateUtils.java index c0f2e6e46ea03..7b8b3ebed4906 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcDateUtils.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcDateUtils.java @@ -10,17 +10,12 @@ import java.sql.Time; import java.sql.Timestamp; import java.time.LocalDate; +import java.time.OffsetTime; import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeFormatterBuilder; -import java.util.Locale; import java.util.function.Function; -import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE; -import static java.time.temporal.ChronoField.HOUR_OF_DAY; -import static java.time.temporal.ChronoField.MILLI_OF_SECOND; -import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; -import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import static org.elasticsearch.xpack.sql.proto.StringUtils.ISO_DATE_WITH_MILLIS; +import static org.elasticsearch.xpack.sql.proto.StringUtils.ISO_TIME_WITH_MILLIS; /** * JDBC specific datetime specific utility methods. 
Because of lack of visibility, this class borrows code @@ -30,29 +25,21 @@ final class JdbcDateUtils { private JdbcDateUtils() {} + // In Java 8 LocalDate.EPOCH is not available, introduced with later Java versions private static final LocalDate EPOCH = LocalDate.of(1970, 1, 1); - static final DateTimeFormatter ISO_WITH_MILLIS = new DateTimeFormatterBuilder() - .parseCaseInsensitive() - .append(ISO_LOCAL_DATE) - .appendLiteral('T') - .appendValue(HOUR_OF_DAY, 2) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2) - .appendFraction(MILLI_OF_SECOND, 3, 3, true) - .appendOffsetId() - .toFormatter(Locale.ROOT); - private static ZonedDateTime asDateTime(String date) { - return ISO_WITH_MILLIS.parse(date, ZonedDateTime::from); + return ISO_DATE_WITH_MILLIS.parse(date, ZonedDateTime::from); } - static long asMillisSinceEpoch(String date) { + static long dateTimeAsMillisSinceEpoch(String date) { return asDateTime(date).toInstant().toEpochMilli(); } + static long timeAsMillisSinceEpoch(String date) { + return ISO_TIME_WITH_MILLIS.parse(date, OffsetTime::from).atDate(EPOCH).toInstant().toEpochMilli(); + } + static Date asDate(String date) { ZonedDateTime zdt = asDateTime(date); return new Date(zdt.toLocalDate().atStartOfDay(zdt.getZone()).toInstant().toEpochMilli()); @@ -63,14 +50,22 @@ static Time asTime(String date) { return new Time(zdt.toLocalTime().atDate(EPOCH).atZone(zdt.getZone()).toInstant().toEpochMilli()); } + static Time timeAsTime(String date) { + OffsetTime ot = ISO_TIME_WITH_MILLIS.parse(date, OffsetTime::from); + return new Time(ot.atDate(EPOCH).toInstant().toEpochMilli()); + } + static Timestamp asTimestamp(long millisSinceEpoch) { return new Timestamp(millisSinceEpoch); } static Timestamp asTimestamp(String date) { - return new Timestamp(asMillisSinceEpoch(date)); + return new Timestamp(dateTimeAsMillisSinceEpoch(date)); } + static Timestamp timeAsTimestamp(String date) { + return new Timestamp(timeAsMillisSinceEpoch(date)); + } /* * Handles the value received as parameter, as either String (a ZonedDateTime formatted in ISO 8601 standard with millis) - * date fields being returned formatted like this. Or a Long value, in case of Histograms. 
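The TIME conversions added to JdbcDateUtils above rest on two pieces of java.time behavior worth seeing in isolation: a time-with-offset string is parsed into an `OffsetTime`, which is then pinned to the epoch date (1970-01-01) to obtain millis. A self-contained sketch of that approach; the formatter mirrors the `ISO_TIME_WITH_MILLIS` definition that appears later in this diff, and the class and method names here are local stand-ins:

import java.time.LocalDate;
import java.time.OffsetTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.util.Locale;

import static java.time.temporal.ChronoField.HOUR_OF_DAY;
import static java.time.temporal.ChronoField.MILLI_OF_SECOND;
import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;

final class TimeParsingSketch {
    // HH:mm:ss.SSS plus a mandatory UTC offset, e.g. "12:29:25.123+05:00" or "12:37:29.000Z"
    static final DateTimeFormatter TIME_WITH_MILLIS = new DateTimeFormatterBuilder()
        .parseCaseInsensitive()
        .appendValue(HOUR_OF_DAY, 2).appendLiteral(':')
        .appendValue(MINUTE_OF_HOUR, 2).appendLiteral(':')
        .appendValue(SECOND_OF_MINUTE, 2)
        .appendFraction(MILLI_OF_SECOND, 3, 3, true)
        .appendOffsetId()
        .toFormatter(Locale.ROOT);

    static long timeAsMillisSinceEpoch(String time) {
        OffsetTime ot = TIME_WITH_MILLIS.parse(time, OffsetTime::from);
        // LocalDate.EPOCH does not exist in Java 8, hence the explicit 1970-01-01
        return ot.atDate(LocalDate.of(1970, 1, 1)).toInstant().toEpochMilli();
    }

    public static void main(String[] args) {
        // 12:29:25.123 at +05:00 is 07:29:25.123 UTC => 7*3_600_000 + 29*60_000 + 25_123 ms
        System.out.println(timeAsMillisSinceEpoch("12:29:25.123+05:00")); // 26965123
        System.out.println(timeAsMillisSinceEpoch("00:00:00.000Z"));      // 0
    }
}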
diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java index 9b1fcb48901a7..1d2489fc6d50d 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java @@ -33,8 +33,15 @@ import java.util.function.Function; import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.jdbc.EsType.DATE; +import static org.elasticsearch.xpack.sql.jdbc.EsType.DATETIME; +import static org.elasticsearch.xpack.sql.jdbc.EsType.TIME; import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.asDateTimeField; -import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.asMillisSinceEpoch; +import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.dateTimeAsMillisSinceEpoch; +import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.asTimestamp; +import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.timeAsMillisSinceEpoch; +import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.timeAsTime; +import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.timeAsTimestamp; class JdbcResultSet implements ResultSet, JdbcWrapper { @@ -251,17 +258,20 @@ private Long dateTimeAsMillis(int columnIndex) throws SQLException { // TODO: the B6 appendix of the jdbc spec does mention CHAR, VARCHAR, LONGVARCHAR, DATE, TIMESTAMP as supported // jdbc types that should be handled by getDate and getTime methods. From all of those we support VARCHAR and // TIMESTAMP. Should we consider the VARCHAR conversion as a later enhancement? - if (EsType.DATETIME == type) { + if (DATETIME == type) { // the cursor can return an Integer if the date-since-epoch is small enough, XContentParser (Jackson) will // return the "smallest" data type for numbers when parsing // TODO: this should probably be handled server side if (val == null) { return null; } - return asDateTimeField(val, JdbcDateUtils::asMillisSinceEpoch, Function.identity()); + return asDateTimeField(val, JdbcDateUtils::dateTimeAsMillisSinceEpoch, Function.identity()); } - if (EsType.DATE == type) { - return asMillisSinceEpoch(val.toString()); + if (DATE == type) { + return dateTimeAsMillisSinceEpoch(val.toString()); + } + if (TIME == type) { + return timeAsMillisSinceEpoch(val.toString()); } return val == null ? 
null : (Long) val; } catch (ClassCastException cce) { @@ -277,10 +287,15 @@ private Date asDate(int columnIndex) throws SQLException { return null; } + EsType type = columnType(columnIndex); + if (type == TIME) { + return new Date(0L); + } + try { + return JdbcDateUtils.asDate(val.toString()); } catch (Exception e) { - EsType type = columnType(columnIndex); throw new SQLException( format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Date", val, type.getName()), e); } @@ -294,11 +309,14 @@ private Time asTime(int columnIndex) throws SQLException { } EsType type = columnType(columnIndex); - if (type == EsType.DATE) { + if (type == DATE) { return new Time(0L); } try { + if (type == TIME) { + return timeAsTime(val.toString()); + } return JdbcDateUtils.asTime(val.toString()); } catch (Exception e) { throw new SQLException( @@ -313,13 +331,16 @@ private Timestamp asTimeStamp(int columnIndex) throws SQLException { return null; } + EsType type = columnType(columnIndex); try { if (val instanceof Number) { - return JdbcDateUtils.asTimestamp(((Number) val).longValue()); + return asTimestamp(((Number) val).longValue()); + } + if (type == TIME) { + return timeAsTimestamp(val.toString()); } - return JdbcDateUtils.asTimestamp(val.toString()); + return asTimestamp(val.toString()); } catch (Exception e) { - EsType type = columnType(columnIndex); throw new SQLException( format(Locale.ROOT, "Unable to convert value [%.128s] of type [%s] to a Timestamp", val, type.getName()), e); } @@ -342,7 +363,7 @@ public Date getDate(String columnLabel, Calendar cal) throws SQLException { @Override public Time getTime(int columnIndex, Calendar cal) throws SQLException { EsType type = columnType(columnIndex); - if (type == EsType.DATE) { + if (type == DATE) { return new Time(0L); } return TypeConverter.convertTime(dateTimeAsMillis(columnIndex), safeCalendar(cal)); diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index d08496b611e0c..35f8241fb4786 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ -35,6 +35,10 @@ import static java.util.Calendar.MONTH; import static java.util.Calendar.SECOND; import static java.util.Calendar.YEAR; +import static org.elasticsearch.xpack.sql.jdbc.EsType.DATE; +import static org.elasticsearch.xpack.sql.jdbc.EsType.DATETIME; +import static org.elasticsearch.xpack.sql.jdbc.EsType.TIME; +import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.asDateTimeField; /** * Conversion utilities for conversion of JDBC types to Java type and back @@ -214,9 +218,11 @@ static Object convert(Object v, EsType columnType, String typeString) throws SQL case FLOAT: return floatValue(v); // Float might be represented as string for infinity and NaN values case DATE: - return JdbcDateUtils.asDateTimeField(v, JdbcDateUtils::asDate, Date::new); + return asDateTimeField(v, JdbcDateUtils::asDate, Date::new); + case TIME: + return asDateTimeField(v, JdbcDateUtils::asTime, Time::new); case DATETIME: - return JdbcDateUtils.asDateTimeField(v, JdbcDateUtils::asTimestamp, Timestamp::new); + return asDateTimeField(v, JdbcDateUtils::asTimestamp, Timestamp::new); case INTERVAL_YEAR: case INTERVAL_MONTH: case INTERVAL_YEAR_TO_MONTH: @@ -471,25 +477,34 @@ private static Double asDouble(Object val, EsType columnType, 
String typeString) } private static Date asDate(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATETIME || columnType == EsType.DATE) { - return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asDate, Date::new); + if (columnType == DATETIME || columnType == DATE) { + return asDateTimeField(val, JdbcDateUtils::asDate, Date::new); + } + if (columnType == TIME) { + return new Date(0L); } return failConversion(val, columnType, typeString, Date.class); } private static Time asTime(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATETIME) { - return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTime, Time::new); + if (columnType == DATETIME) { + return asDateTimeField(val, JdbcDateUtils::asTime, Time::new); + } + if (columnType == TIME) { + return asDateTimeField(val, JdbcDateUtils::timeAsTime, Time::new); } - if (columnType == EsType.DATE) { + if (columnType == DATE) { return new Time(0L); } return failConversion(val, columnType, typeString, Time.class); } private static Timestamp asTimestamp(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATETIME || columnType == EsType.DATE) { - return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTimestamp, Timestamp::new); + if (columnType == DATETIME || columnType == DATE) { + return asDateTimeField(val, JdbcDateUtils::asTimestamp, Timestamp::new); + } + if (columnType == TIME) { + return asDateTimeField(val, JdbcDateUtils::timeAsTimestamp, Timestamp::new); } return failConversion(val, columnType, typeString, Timestamp.class); } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java index 1a47bb0add85b..bf34558b404f5 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java @@ -72,6 +72,18 @@ public void testDateTimes() throws IOException { "datetime", "1119-01-15T12:37:29.000Z", 24); assertQuery("SELECT CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)", "CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)", "datetime", "1119-01-15T12:37:29.000Z", 24); + + assertQuery("SELECT CAST('2019-01-14' AS DATE)", "CAST('2019-01-14' AS DATE)", + "date", "2019-01-14T00:00:00.000Z", 24); + assertQuery("SELECT CAST(-26853765751000 AS DATE)", "CAST(-26853765751000 AS DATE)", + "date", "1119-01-15T00:00:00.000Z", 24); + + assertQuery("SELECT CAST('12:29:25.123Z' AS TIME)", "CAST('12:29:25.123Z' AS TIME)", + "time", "12:29:25.123Z", 18); + assertQuery("SELECT CAST('12:29:25.123456789+05:00' AS TIME)", "CAST('12:29:25.123456789+05:00' AS TIME)", + "time", "12:29:25.123+05:00", 18); + assertQuery("SELECT CAST(-26853765751000 AS TIME)", "CAST(-26853765751000 AS TIME)", + "time", "12:37:29.000Z", 18); } public void testIPs() throws IOException { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java index 7db6faefb57c3..7f5077f15a885 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java @@ -6,7 +6,6 @@ package 
org.elasticsearch.xpack.sql.qa.jdbc; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.apache.logging.log4j.Logger; import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase; diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java index bcd3d4073eabe..8931fe0264e9d 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.qa.jdbc; import com.carrotsearch.hppc.IntObjectHashMap; - import org.apache.logging.log4j.Logger; import org.elasticsearch.xpack.sql.jdbc.EsType; import org.elasticsearch.xpack.sql.proto.StringUtils; @@ -208,6 +207,9 @@ private static void doAssertResultSetData(ResultSet expected, ResultSet actual, case "Date": columnClassName = "java.sql.Date"; break; + case "Time": + columnClassName = "java.sql.Time"; + break; case "Timestamp": columnClassName = "java.sql.Timestamp"; break; diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java index b8cd81e39f545..3d65769a9b8d7 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java @@ -1151,6 +1151,35 @@ public void testGetDateType() throws Exception { assertEquals(expectedTimestamp, results.getObject("date", java.sql.Timestamp.class)); }); } + + public void testGetTimeType() throws Exception { + createIndex("test"); + updateMapping("test", builder -> builder.startObject("test_date").field("type", "date").endObject()); + + // 2018-03-12 17:20:30.123 UTC + Long timeInMillis = 1520875230123L; + index("test", "1", builder -> builder.field("test_date", timeInMillis)); + + // UTC +10 hours + String timeZoneId1 = "Etc/GMT-10"; + + doWithQueryAndTimezone("SELECT CAST(test_date AS TIME) as time FROM test", timeZoneId1, results -> { + results.next(); + + java.sql.Date expectedDate = new java.sql.Date(0L); + assertEquals(expectedDate, results.getDate("time")); + assertEquals(expectedDate, results.getObject("time", java.sql.Date.class)); + + java.sql.Time expectedTime = asTime(timeInMillis, ZoneId.of("Etc/GMT-10")); + assertEquals(expectedTime, results.getTime("time")); + assertEquals(expectedTime, results.getObject("time", java.sql.Time.class)); + + java.sql.Timestamp expectedTimestamp = new java.sql.Timestamp(expectedTime.getTime()); + assertEquals(expectedTimestamp, results.getTimestamp("time")); + assertEquals(expectedTimestamp, results.getObject("time", java.sql.Timestamp.class)); + }); + } + public void testValidGetObjectCalls() throws Exception { createIndex("test"); updateMappingForNumericValuesTests("test"); diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 93a693a69b476..6c57f19cbbece 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -2495,3 +2495,25 @@ SELECT first_name, last_name FROM emp WHERE last_name NOT LIKE '%a%' AND first_n Anoosh |Peyn Arumugam |Ossenbruggen ; + +simpleLikeOperator +// 
tag::simpleLike +SELECT author, name FROM library WHERE name LIKE 'Dune%'; + + author | name +---------------+--------------- +Frank Herbert |Dune +Frank Herbert |Dune Messiah +// end::simpleLike +; + +simpleRLikeOperator +// tag::simpleRLike +SELECT author, name FROM library WHERE name RLIKE 'Child.* Dune'; + + author | name +---------------+---------------- +Frank Herbert |Children of Dune +// end::simpleRLike +; + diff --git a/x-pack/plugin/sql/qa/src/main/resources/time.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/time.csv-spec new file mode 100644 index 0000000000000..cba8dd29a75a1 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/time.csv-spec @@ -0,0 +1,83 @@ +// +// TIME +// + +// AwaitsFix: https://github.com/elastic/elasticsearch/issues/40717 +//timeExtractTimeParts +//SELECT +//SECOND(CAST(birth_date AS TIME)) d, +//MINUTE(CAST(birth_date AS TIME)) m, +//HOUR(CAST(birth_date AS TIME)) h +//FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; +// +// d:i | m:i | h:i +//0 |0 |0 +//0 |0 |0 +//0 |0 |0 +//0 |0 |0 +//0 |0 |0 +//0 |0 |0 +//0 |0 |0 +//0 |0 |0 +//0 |0 |0 +//; + +timeAsFilter +SELECT birth_date, last_name FROM "test_emp" WHERE birth_date::TIME = CAST('00:00:00' AS TIME) ORDER BY emp_no LIMIT 5; + + birth_date:ts | last_name:s +1953-09-02 00:00:00Z | Facello +1964-06-02 00:00:00Z | Simmel +1959-12-03 00:00:00Z | Bamford +1954-05-01 00:00:00Z | Koblick +1955-01-21 00:00:00Z | Maliniak +; + +timeAsFilter_NoMatch +SELECT count(*) FROM "test_emp" WHERE birth_date::TIME = CAST('12:34:56.789' AS TIME); + + count(*):l +0 +; + +timeAsOrderBy +SELECT last_name FROM "test_emp" ORDER BY birth_date::TIME, emp_no LIMIT 5; + +last_name:s +Meriste +Lenart +Stamatiou +Tzvieli +Casley +; + +timeAndFunctionAsGroupingKey +SELECT HOUR(CAST(birth_date AS TIME)) AS m, CAST(SUM(emp_no) AS INT) s FROM test_emp GROUP BY m ORDER BY m LIMIT 5; + + m:i | s:i +null |100445 +0 |904605 +; + +// AwaitsFix: https://github.com/elastic/elasticsearch/issues/40717 +//timeAsHavingFilter +//SELECT MINUTE_OF_HOUR(MAX(birth_date)::TIME + INTERVAL 10 MINUTES) as minute, gender FROM test_emp GROUP BY gender HAVING CAST(MAX(birth_date) AS TIME) = CAST('00:00:00.000' AS TIME) ORDER BY gender; +// +//minute:i | gender:s +//10 | null +//10 | F +//10 | M +//; + +timeAsHavingFilterNoMatch +SELECT MINUTE_OF_HOUR(MAX(birth_date)::TIME) as minute, gender FROM test_emp GROUP BY gender HAVING CAST(MAX(birth_date) AS TIME) > CAST('00:00:00.000' AS TIME); + +minute:i | gender:s +; + +timeAndInterval +SELECT HOUR(CAST('10:11:12.345' AS TIME) + INTERVAL '20' HOURS) AS h, SECOND(INTERVAL '40' SECONDS + CAST('10:11:12.345' AS TIME)) AS m; + +h:i | m:i +6 | 52 +; diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java index 93c2e2f743c97..bd90354e5f98c 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/StringUtils.java @@ -8,6 +8,7 @@ import java.sql.Timestamp; import java.time.Duration; +import java.time.OffsetTime; import java.time.Period; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; @@ -26,7 +27,7 @@ public final class StringUtils { public static final String EMPTY = ""; - private static final DateTimeFormatter ISO_WITH_MILLIS = new DateTimeFormatterBuilder() + public static final DateTimeFormatter 
ISO_DATE_WITH_MILLIS = new DateTimeFormatterBuilder() .parseCaseInsensitive() .append(ISO_LOCAL_DATE) .appendLiteral('T') @@ -39,6 +40,17 @@ public final class StringUtils { .appendOffsetId() .toFormatter(Locale.ROOT); + public static final DateTimeFormatter ISO_TIME_WITH_MILLIS = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(HOUR_OF_DAY, 2) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .appendOffsetId() + .toFormatter(Locale.ROOT); + private static final int SECONDS_PER_MINUTE = 60; private static final int SECONDS_PER_HOUR = SECONDS_PER_MINUTE * 60; private static final int SECONDS_PER_DAY = SECONDS_PER_HOUR * 24; @@ -49,16 +61,18 @@ public static String toString(Object value) { if (value == null) { return "null"; } - + + if (value instanceof ZonedDateTime) { + return ((ZonedDateTime) value).format(ISO_DATE_WITH_MILLIS); + } + if (value instanceof OffsetTime) { + return ((OffsetTime) value).format(ISO_TIME_WITH_MILLIS); + } if (value instanceof Timestamp) { Timestamp ts = (Timestamp) value; return ts.toInstant().toString(); } - if (value instanceof ZonedDateTime) { - return ((ZonedDateTime) value).format(ISO_WITH_MILLIS); - } - // handle intervals // YEAR/MONTH/YEAR TO MONTH -> YEAR TO MONTH if (value instanceof Period) { @@ -112,4 +126,4 @@ public static String toString(Object value) { private static String indent(long timeUnit) { return timeUnit < 10 ? "0" + timeUnit : Long.toString(timeUnit); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 47c53e772d5dd..bade2d44c8af3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -298,7 +298,8 @@ private static boolean checkGroupBy(LogicalPlan p, Set localFailures, return checkGroupByInexactField(p, localFailures) && checkGroupByAgg(p, localFailures, resolvedFunctions) && checkGroupByOrder(p, localFailures, groupingFailures) - && checkGroupByHaving(p, localFailures, groupingFailures, resolvedFunctions); + && checkGroupByHaving(p, localFailures, groupingFailures, resolvedFunctions) + && checkGroupByTime(p, localFailures); } // check whether an orderBy failed or if it occurs on a non-key @@ -473,14 +474,30 @@ private static boolean checkGroupByInexactField(LogicalPlan p, Set loca a.groupings().forEach(e -> e.forEachUp(c -> { EsField.Exact exact = c.getExactInfo(); if (exact.hasExact() == false) { - localFailures.add(fail(c, "Field of data type [" + c.dataType().typeName + "] cannot be used for grouping; " + - exact.errorMsg())); + localFailures.add(fail(c, "Field [" + c.sourceText() + "] of data type [" + c.dataType().typeName + "] " + + "cannot be used for grouping; " + exact.errorMsg())); } }, FieldAttribute.class)); } return true; } + private static boolean checkGroupByTime(LogicalPlan p, Set localFailures) { + if (p instanceof Aggregate) { + Aggregate a = (Aggregate) p; + + // TIME data type is not allowed for grouping key + // https://github.com/elastic/elasticsearch/issues/40639 + a.groupings().forEach(f -> { + if (f.dataType().isTimeBased()) { + localFailures.add(fail(f, "Function [" + f.sourceText() + "] with data type [" + f.dataType().typeName + + "] " + 
"cannot be used for grouping")); + } + }); + } + return true; + } + // check whether plain columns specified in an agg are mentioned in the group-by private static boolean checkGroupByAgg(LogicalPlan p, Set localFailures, Map functions) { if (p instanceof Aggregate) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java index b541df7e81a81..14d50f7c9a09b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java @@ -75,6 +75,8 @@ public Object extract(Bucket bucket) { Object value = agg.getHits().getAt(0).getFields().values().iterator().next().getValue(); if (fieldDataType.isDateBased()) { return DateUtils.asDateTime(Long.parseLong(value.toString()), zoneId); + } else if (fieldDataType.isTimeBased()) { + return DateUtils.asTimeOnly(Long.parseLong(value.toString()), zoneId); } else { return value; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java index 811cc299ccb97..cb86e2742b2d8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java @@ -88,7 +88,7 @@ public EsField.Exact getExactInfo() { public FieldAttribute exactAttribute() { EsField exactField = field.getExactField(); if (exactField.equals(field) == false) { - return innerField(field.getExactField()); + return innerField(exactField); } return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java index 61bc8ed44a9a8..f6e1e3ad8be69 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java @@ -43,8 +43,18 @@ public static TypeResolution isDate(Expression e, String operationName, ParamOrd return isType(e, DataType::isDateBased, operationName, paramOrd, "date", "datetime"); } + public static TypeResolution isDateOrTime(Expression e, String operationName, ParamOrdinal paramOrd) { + return isType(e, DataType::isDateOrTimeBased, operationName, paramOrd, "date", "time", "datetime"); + } + public static TypeResolution isNumericOrDate(Expression e, String operationName, ParamOrdinal paramOrd) { - return isType(e, dt -> dt.isNumeric() || dt.isDateBased(), operationName, paramOrd, "date", "datetime", "numeric"); + return isType(e, dt -> dt.isNumeric() || dt.isDateBased(), operationName, paramOrd, + "date", "datetime", "numeric"); + } + + public static TypeResolution isNumericOrDateOrTime(Expression e, String operationName, ParamOrdinal paramOrd) { + return isType(e, dt -> dt.isNumeric() || dt.isDateOrTimeBased(), operationName, paramOrd, + "date", "time", "datetime", "numeric"); } public static TypeResolution isExact(Expression e, String message) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java index 5827083343a0f..eaf2d798f6d2d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java @@ -14,7 +14,7 @@ import java.util.List; import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isExact; -import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isNumericOrDate; +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isNumericOrDateOrTime; /** * Find the maximum value in matching documents. @@ -50,7 +50,7 @@ protected TypeResolution resolveType() { if (field().dataType().isString()) { return isExact(field(), sourceText(), ParamOrdinal.DEFAULT); } else { - return isNumericOrDate(field(), sourceText(), ParamOrdinal.DEFAULT); + return isNumericOrDateOrTime(field(), sourceText(), ParamOrdinal.DEFAULT); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java index e64774fe8e720..f195517335883 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java @@ -14,7 +14,7 @@ import java.util.List; import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isExact; -import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isNumericOrDate; +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isNumericOrDateOrTime; /** * Find the minimum value in matched documents. 
@@ -53,7 +53,7 @@ protected TypeResolution resolveType() { if (field().dataType().isString()) { return isExact(field(), sourceText(), ParamOrdinal.DEFAULT); } else { - return isNumericOrDate(field(), sourceText(), ParamOrdinal.DEFAULT); + return isNumericOrDateOrTime(field(), sourceText(), ParamOrdinal.DEFAULT); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java index f5e1a3ece38e9..f5ceb26696065 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor; @@ -78,6 +79,7 @@ public static List getNamedWriteables() { // datetime entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new)); + entries.add(new Entry(Processor.class, TimeProcessor.NAME, TimeProcessor::new)); entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new)); entries.add(new Entry(Processor.class, NonIsoDateTimeProcessor.NAME, NonIsoDateTimeProcessor::new)); entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java index cae78a42e55e9..bda86183fff02 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java @@ -13,13 +13,12 @@ import org.elasticsearch.xpack.sql.tree.Source; import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.Objects; import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDate; abstract class BaseDateTimeFunction extends UnaryScalarFunction { - + private final ZoneId zoneId; BaseDateTimeFunction(Source source, Expression field, ZoneId zoneId) { @@ -50,17 +49,9 @@ public boolean foldable() { @Override public Object fold() { - ZonedDateTime folded = (ZonedDateTime) field().fold(); - if (folded == null) { - return null; - } - - return doFold(folded.withZoneSameInstant(zoneId)); + return makeProcessor().process(field().fold()); } - protected abstract Object doFold(ZonedDateTime dateTime); - - @Override public boolean equals(Object obj) { if (obj == null || obj.getClass() != getClass()) { @@ -68,7 +59,7 @@ public boolean equals(Object obj) { } BaseDateTimeFunction other = (BaseDateTimeFunction) obj; return Objects.equals(other.field(), field()) - && 
Objects.equals(other.zoneId(), zoneId()); + && Objects.equals(other.zoneId(), zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java index 608057cf235d5..ddab74aa927aa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java @@ -18,11 +18,11 @@ public abstract class BaseDateTimeProcessor implements Processor { private final ZoneId zoneId; - + BaseDateTimeProcessor(ZoneId zoneId) { this.zoneId = zoneId; } - + BaseDateTimeProcessor(StreamInput in) throws IOException { zoneId = ZoneId.of(in.readString()); } @@ -31,7 +31,7 @@ public abstract class BaseDateTimeProcessor implements Processor { public void writeTo(StreamOutput out) throws IOException { out.writeString(zoneId.getId()); } - + ZoneId zoneId() { return zoneId; } @@ -43,11 +43,11 @@ public Object process(Object input) { } if (!(input instanceof ZonedDateTime)) { - throw new SqlIllegalArgumentException("A date is required; received {}", input); + throw new SqlIllegalArgumentException("A [date], a [time] or a [datetime] is required; received {}", input); } return doProcess(((ZonedDateTime) input).withZoneSameInstant(zoneId)); } abstract Object doProcess(ZonedDateTime dateTime); -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java index 9a55548c921bb..d314056ea64e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java @@ -16,6 +16,7 @@ import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; +import java.time.temporal.Temporal; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -28,17 +29,12 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { this.extractor = extractor; } - @Override - protected Object doFold(ZonedDateTime dateTime) { - return dateTimeChrono(dateTime, extractor.chronoField()); - } - public static Integer dateTimeChrono(ZonedDateTime dateTime, String tzId, String chronoName) { ZonedDateTime zdt = dateTime.withZoneSameInstant(ZoneId.of(tzId)); return dateTimeChrono(zdt, ChronoField.valueOf(chronoName)); } - private static Integer dateTimeChrono(ZonedDateTime dateTime, ChronoField field) { + protected static Integer dateTimeChrono(Temporal dateTime, ChronoField field) { return Integer.valueOf(dateTime.get(field)); } @@ -68,4 +64,8 @@ public DataType dataType() { // used for applying ranges public abstract String dateTimeFormat(); -} \ No newline at end of file + + protected DateTimeExtractor extractor() { + return extractor; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java index 5357462fdd6a3..4a39914729951 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.time.OffsetTime; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; @@ -38,6 +39,10 @@ public int extract(ZonedDateTime dt) { return dt.get(field); } + public int extract(OffsetTime time) { + return time.get(field); + } + public ChronoField chronoField() { return field; } @@ -95,4 +100,4 @@ public boolean equals(Object obj) { public String toString() { return extractor.toString(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java index 32de1179965f6..c15a730e25b3f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java @@ -7,15 +7,15 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; +import org.elasticsearch.xpack.sql.tree.Source; import java.time.ZoneId; /** * Extract the hour of the day from a datetime. */ -public class HourOfDay extends DateTimeFunction { +public class HourOfDay extends TimeFunction { public HourOfDay(Source source, Expression field, ZoneId zoneId) { super(source, field, zoneId, DateTimeExtractor.HOUR_OF_DAY); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java index 1ef450b3f650d..823de40034feb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java @@ -7,15 +7,15 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; +import org.elasticsearch.xpack.sql.tree.Source; import java.time.ZoneId; /** * Extract the minute of the day from a datetime. 
*/ -public class MinuteOfDay extends DateTimeFunction { +public class MinuteOfDay extends TimeFunction { public MinuteOfDay(Source source, Expression field, ZoneId zoneId) { super(source, field, zoneId, DateTimeExtractor.MINUTE_OF_DAY); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java index 9c4cf4884b3f6..1136b858a7227 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java @@ -7,15 +7,15 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; +import org.elasticsearch.xpack.sql.tree.Source; import java.time.ZoneId; /** * Extract the minute of the hour from a datetime. */ -public class MinuteOfHour extends DateTimeFunction { +public class MinuteOfHour extends TimeFunction { public MinuteOfHour(Source source, Expression field, ZoneId zoneId) { super(source, field, zoneId, DateTimeExtractor.MINUTE_OF_HOUR); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java index b5d7305d2bbd2..35397df5ef4aa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.util.StringUtils; import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.Locale; import static java.lang.String.format; @@ -33,11 +32,6 @@ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { this.nameExtractor = nameExtractor; } - @Override - protected Object doFold(ZonedDateTime dateTime) { - return nameExtractor.extract(dateTime); - } - @Override public ScriptTemplate scriptWithField(FieldAttribute field) { return new ScriptTemplate( @@ -58,4 +52,4 @@ protected Processor makeProcessor() { public DataType dataType() { return DataType.KEYWORD; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java index 4d5fb4ad91efd..1aee57ae80bc3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.util.StringUtils; import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.Locale; import static java.lang.String.format; @@ -33,11 +32,6 @@ abstract class NonIsoDateTimeFunction extends BaseDateTimeFunction { this.extractor = extractor; } - @Override -
protected Object doFold(ZonedDateTime dateTime) { - return extractor.extract(dateTime); - } - @Override public ScriptTemplate scriptWithField(FieldAttribute field) { return new ScriptTemplate( @@ -58,4 +52,4 @@ protected Processor makeProcessor() { public DataType dataType() { return DataType.INTEGER; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java index 4837b7c4a8603..275e7181bc312 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java @@ -10,14 +10,12 @@ import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.time.ZoneId; -import java.time.ZonedDateTime; -import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor.quarter; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public class Quarter extends BaseDateTimeFunction { @@ -26,11 +24,6 @@ public Quarter(Source source, Expression field, ZoneId zoneId) { super(source, field, zoneId); } - @Override - protected Object doFold(ZonedDateTime dateTime) { - return quarter(dateTime); - } - @Override public ScriptTemplate scriptWithField(FieldAttribute field) { return new ScriptTemplate(formatTemplate("{sql}.quarter(doc[{}].value, {})"), @@ -60,4 +53,4 @@ protected Processor makeProcessor() { public DataType dataType() { return DataType.INTEGER; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java index 4b7c354f412d9..fb83191f5bcbd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java @@ -7,15 +7,15 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; +import org.elasticsearch.xpack.sql.tree.Source; import java.time.ZoneId; /** * Extract the second of the minute from a datetime. 
*/ -public class SecondOfMinute extends DateTimeFunction { +public class SecondOfMinute extends TimeFunction { public SecondOfMinute(Source source, Expression field, ZoneId zoneId) { super(source, field, zoneId, DateTimeExtractor.SECOND_OF_MINUTE); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeFunction.java new file mode 100644 index 0000000000000..857d8fada5bd4 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.time.OffsetTime; +import java.time.ZoneId; +import java.time.temporal.ChronoField; + +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDateOrTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.asTimeAtZone; + +public abstract class TimeFunction extends DateTimeFunction { + + TimeFunction(Source source, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { + super(source, field, zoneId, extractor); + } + + public static Integer dateTimeChrono(OffsetTime time, String tzId, String chronoName) { + return dateTimeChrono(asTimeAtZone(time, ZoneId.of(tzId)), ChronoField.valueOf(chronoName)); + } + + @Override + protected TypeResolution resolveType() { + return isDateOrTime(field(), sourceText(), Expressions.ParamOrdinal.DEFAULT); + } + + @Override + protected Processor makeProcessor() { + return new TimeProcessor(extractor(), zoneId()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeProcessor.java new file mode 100644 index 0000000000000..a263d83dcb195 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeProcessor.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; +import java.time.OffsetTime; +import java.time.ZoneId; +import java.util.Objects; + +import static org.elasticsearch.xpack.sql.util.DateUtils.asTimeAtZone; + +public class TimeProcessor extends DateTimeProcessor { + + + public static final String NAME = "time"; + + public TimeProcessor(DateTimeExtractor extractor, ZoneId zoneId) { + super(extractor, zoneId); + } + + public TimeProcessor(StreamInput in) throws IOException { + super(in); + } + + @Override + public Object process(Object input) { + if (input instanceof OffsetTime) { + return doProcess(asTimeAtZone((OffsetTime) input, zoneId())); + } + return super.process(input); + } + + private Object doProcess(OffsetTime time) { + return extractor().extract(time); + } + + @Override + public int hashCode() { + return Objects.hash(extractor(), zoneId()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + TimeProcessor other = (TimeProcessor) obj; + return Objects.equals(extractor(), other.extractor()) + && Objects.equals(zoneId(), other.zoneId()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index 4d77243230cf7..570154026b807 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor.BinaryOptionalMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; @@ -41,6 +42,7 @@ import org.elasticsearch.xpack.sql.util.StringUtils; import java.time.Duration; +import java.time.OffsetTime; import java.time.Period; import java.time.ZonedDateTime; import java.util.List; @@ -316,6 +318,9 @@ public static Integer dateTimeChrono(Object dateTime, String tzId, String chrono if (dateTime == null || tzId == null || chronoName == null) { return null; } + if (dateTime instanceof OffsetTime) { + return TimeFunction.dateTimeChrono((OffsetTime) dateTime, tzId, chronoName); + } return DateTimeFunction.dateTimeChrono(asDateTime(dateTime), tzId, chronoName); } @@ -396,6 +401,10 @@ public static IntervalYearMonth intervalYearMonth(String text, String typeName) return new IntervalYearMonth(Period.parse(text), DataType.fromTypeName(typeName)); } + public static OffsetTime asTime(String time) { + return OffsetTime.parse(time); + } + // // String functions // diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java index 3832fbda2217a..b24ec56727d64 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.OffsetTime; import java.time.ZonedDateTime; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -80,12 +81,19 @@ default ScriptTemplate scriptWithFoldable(Expression foldable) { return new ScriptTemplate(processScript("{sql}.intervalYearMonth({},{})"), paramsBuilder().variable(iym.interval().toString()).variable(iym.dataType().name()).build(), dataType()); - } else if (fold instanceof IntervalDayTime) { + } + if (fold instanceof IntervalDayTime) { IntervalDayTime idt = (IntervalDayTime) fold; return new ScriptTemplate(processScript("{sql}.intervalDayTime({},{})"), paramsBuilder().variable(idt.interval().toString()).variable(idt.dataType().name()).build(), dataType()); } + if (fold instanceof OffsetTime) { + OffsetTime ot = (OffsetTime) fold; + return new ScriptTemplate(processScript("{sql}.asTime({})"), + paramsBuilder().variable(ot.toString()).build(), + dataType()); + } return new ScriptTemplate(processScript("{}"), paramsBuilder().variable(fold).build(), diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java index d66fb7df2ba5d..33a4f8c0e5603 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Arithmetics.java @@ -6,14 +6,24 @@ package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic; import java.time.Duration; +import java.time.OffsetTime; import java.time.Period; -import java.time.ZonedDateTime; +import java.time.temporal.Temporal; + +import static org.elasticsearch.xpack.sql.util.DateUtils.DAY_IN_MILLIS; /** * Arithmetic operation using the type widening rules of the JLS 5.6.2 namely * widen to double or float or long or int in this order. 
*/ -public abstract class Arithmetics { +public final class Arithmetics { + + private Arithmetics() {} + + private enum IntervalOperation { + ADD, + SUB + } static Number add(Number l, Number r) { if (l == null || r == null) { @@ -33,20 +43,12 @@ static Number add(Number l, Number r) { return Integer.valueOf(Math.addExact(l.intValue(), r.intValue())); } - static ZonedDateTime add(ZonedDateTime l, Period r) { - if (l == null || r == null) { - return null; - } - - return l.plus(r); + static Temporal add(Temporal l, Period r) { + return periodArithmetics(l, r, IntervalOperation.ADD); } - static ZonedDateTime add(ZonedDateTime l, Duration r) { - if (l == null || r == null) { - return null; - } - - return l.plus(r); + static Temporal add(Temporal l, Duration r) { + return durationArithmetics(l, r, IntervalOperation.ADD); } static Number sub(Number l, Number r) { @@ -67,20 +69,12 @@ static Number sub(Number l, Number r) { return Integer.valueOf(Math.subtractExact(l.intValue(), r.intValue())); } - static ZonedDateTime sub(ZonedDateTime l, Period r) { - if (l == null || r == null) { - return null; - } - - return l.minus(r); + static Temporal sub(Temporal l, Period r) { + return periodArithmetics(l, r, IntervalOperation.SUB); } - static ZonedDateTime sub(ZonedDateTime l, Duration r) { - if (l == null || r == null) { - return null; - } - - return l.minus(r); + static Temporal sub(Temporal l, Duration r) { + return durationArithmetics(l, r, IntervalOperation.SUB); } static Number mul(Number l, Number r) { @@ -162,4 +156,36 @@ static Number negate(Number n) { return Integer.valueOf(Math.negateExact(n.intValue())); } + + private static Temporal periodArithmetics(Temporal l, Period r, IntervalOperation operation) { + if (l == null || r == null) { + return null; + } + + if (l instanceof OffsetTime) { + return l; + } + + if (operation == IntervalOperation.ADD) { + return l.plus(r); + } else { + return l.minus(r); + } + } + + private static Temporal durationArithmetics(Temporal l, Duration r, IntervalOperation operation) { + if (l == null || r == null) { + return null; + } + + if (l instanceof OffsetTime) { + r = Duration.ofMillis(r.toMillis() % DAY_IN_MILLIS); + } + + if (operation == IntervalOperation.ADD) { + return l.plus(r); + } else { + return l.minus(r); + } + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java index a0fd57e30d0ca..b6bfaa4acb63d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java @@ -17,7 +17,9 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import java.io.IOException; +import java.time.OffsetTime; import java.time.ZonedDateTime; +import java.time.temporal.Temporal; import java.util.function.BiFunction; public class BinaryArithmeticProcessor extends FunctionalBinaryProcessor { @@ -41,17 +43,17 @@ public enum BinaryArithmeticOperation implements PredicateBiFunction) { + if ((r instanceof ZonedDateTime || r instanceof OffsetTime) && l instanceof Interval) { throw new SqlIllegalArgumentException("Cannot subtract a date from an interval; do you mean 
the reverse?"); } @@ -181,4 +183,4 @@ protected Object doProcess(Object left, Object right) { // this should not occur throw new SqlIllegalArgumentException("Cannot perform arithmetic operation due to arguments"); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java index cad2d7ffa625a..ee3ca6aa6773b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java @@ -34,7 +34,7 @@ protected Sub replaceChildren(Expression newLeft, Expression newRight) { @Override protected TypeResolution resolveWithIntervals() { - if (right().dataType().isDateBased() && DataTypes.isInterval(left().dataType())) { + if ((right().dataType().isDateOrTimeBased()) && DataTypes.isInterval(left().dataType())) { return new TypeResolution(format(null, "Cannot subtract a {}[{}] from an interval[{}]; do you mean the reverse?", right().dataType().typeName, right().source().text(), left().source().text())); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index c3d5ba2228467..ee5ff0300b22b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -114,7 +114,6 @@ import org.elasticsearch.xpack.sql.util.StringUtils; import java.time.Duration; -import java.time.LocalTime; import java.time.Period; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAmount; @@ -124,11 +123,11 @@ import java.util.Map; import java.util.StringJoiner; -import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.type.DataTypeConversion.conversionFor; import static org.elasticsearch.xpack.sql.util.DateUtils.asDateOnly; +import static org.elasticsearch.xpack.sql.util.DateUtils.asTimeOnly; import static org.elasticsearch.xpack.sql.util.DateUtils.ofEscapedLiteral; abstract class ExpressionBuilder extends IdentifierBuilder { @@ -768,14 +767,11 @@ public Literal visitTimeEscapedLiteral(TimeEscapedLiteralContext ctx) { Source source = source(ctx); // parse HH:mm:ss - LocalTime lt = null; try { - lt = LocalTime.parse(string, ISO_LOCAL_TIME); + return new Literal(source, asTimeOnly(string), DataType.TIME); } catch (DateTimeParseException ex) { throw new ParsingException(source, "Invalid time received; {}", ex.getMessage()); } - - throw new SqlIllegalArgumentException("Time (only) literals are not supported; a date component is required as well"); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 1fdd27d9b0b2d..8495b0269eb84 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -24,9 +24,9 @@ import 
org.elasticsearch.xpack.sql.expression.function.aggregate.ExtendedStats; import org.elasticsearch.xpack.sql.expression.function.aggregate.First; import org.elasticsearch.xpack.sql.expression.function.aggregate.Last; -import org.elasticsearch.xpack.sql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.sql.expression.function.aggregate.MatrixStats; import org.elasticsearch.xpack.sql.expression.function.aggregate.Max; +import org.elasticsearch.xpack.sql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.sql.expression.function.aggregate.Min; import org.elasticsearch.xpack.sql.expression.function.aggregate.PercentileRanks; import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentiles; @@ -291,7 +291,7 @@ else if (exp instanceof GroupingFunction) { if (h.dataType() == DATE) { intervalAsMillis = DateUtils.minDayInterval(intervalAsMillis); } - // TODO: set timezone + if (field instanceof FieldAttribute) { key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.zoneId()); } else if (field instanceof Function) { @@ -470,7 +470,6 @@ private static String topAggsField(AggregateFunction af, Expression e) { af.nodeString()); } - // TODO: need to optimize on ngram // TODO: see whether escaping is needed @SuppressWarnings("rawtypes") static class Likes extends ExpressionTranslator { @@ -478,34 +477,23 @@ static class Likes extends ExpressionTranslator { @Override protected QueryTranslation asQuery(RegexMatch e, boolean onAggs) { Query q = null; - boolean inexact = true; - String target = null; + String targetFieldName = null; if (e.field() instanceof FieldAttribute) { - target = nameOf(((FieldAttribute) e.field()).exactAttribute()); + targetFieldName = nameOf(((FieldAttribute) e.field()).exactAttribute()); } else { - throw new SqlIllegalArgumentException("Scalar function ({}) not allowed (yet) as arguments for LIKE", + throw new SqlIllegalArgumentException("Scalar function [{}] not allowed (yet) as argument for " + e.functionName(), Expressions.name(e.field())); } if (e instanceof Like) { LikePattern p = ((Like) e).pattern(); - if (inexact) { - q = new QueryStringQuery(e.source(), p.asLuceneWildcard(), target); - } - else { - q = new WildcardQuery(e.source(), nameOf(e.field()), p.asLuceneWildcard()); - } + q = new WildcardQuery(e.source(), targetFieldName, p.asLuceneWildcard()); } if (e instanceof RLike) { String pattern = ((RLike) e).pattern(); - if (inexact) { - q = new QueryStringQuery(e.source(), "/" + pattern + "/", target); - } - else { - q = new RegexQuery(e.source(), nameOf(e.field()), pattern); - } + q = new RegexQuery(e.source(), targetFieldName, pattern); } return q != null ? 
new QueryTranslation(wrapIfNested(q, e.field())) : null; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java index 9638a1bd305d2..df207269eec0f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java @@ -41,6 +41,8 @@ public final CompositeValuesSourceBuilder asValueSource() { builder.valueType(ValueType.STRING); } else if (script.outputType() == DataType.DATE) { builder.valueType(ValueType.LONG); + } else if (script.outputType() == DataType.TIME) { + builder.valueType(ValueType.LONG); } else if (script.outputType() == DataType.DATETIME) { builder.valueType(ValueType.LONG); } else if (script.outputType() == DataType.BOOLEAN) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index deeeed1c1ca16..d4cc5b16ec9c4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -45,6 +45,7 @@ public enum DataType { NESTED( "nested", JDBCType.STRUCT, -1, 0, 0, false, false, false), BINARY( "binary", JDBCType.VARBINARY, -1, Integer.MAX_VALUE, Integer.MAX_VALUE, false, false, false), DATE( JDBCType.DATE, Long.BYTES, 24, 24, false, false, true), + TIME( JDBCType.TIME, Long.BYTES, 3, 18, false, false, true), // since ODBC and JDBC interpret precision for Date as display size // the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone) // see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288 @@ -104,7 +105,7 @@ public enum DataType { // Date ODBC_TO_ES.put("SQL_DATE", DATE); - ODBC_TO_ES.put("SQL_TIME", DATETIME); + ODBC_TO_ES.put("SQL_TIME", TIME); ODBC_TO_ES.put("SQL_TIMESTAMP", DATETIME); // Intervals @@ -253,6 +254,14 @@ public boolean isPrimitive() { public boolean isDateBased() { return this == DATE || this == DATETIME; } + + public boolean isTimeBased() { + return this == TIME; + } + + public boolean isDateOrTimeBased() { + return isDateBased() || isTimeBased(); + } public static DataType fromOdbcType(String odbcType) { return ODBC_TO_ES.get(odbcType); @@ -278,6 +287,6 @@ public static DataType fromTypeName(String esType) { } public String format() { - return isDateBased() ? DateUtils.DATE_PARSE_FORMAT : null; + return isDateOrTimeBased() ? 
DateUtils.DATE_PARSE_FORMAT : null; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index 9dbb2a3abb6f6..40a03e26eb0ef 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.OffsetTime; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.util.Locale; @@ -22,6 +23,7 @@ import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.LONG; import static org.elasticsearch.xpack.sql.type.DataType.NULL; +import static org.elasticsearch.xpack.sql.type.DataType.TIME; /** * Conversions from one Elasticsearch data type to another Elasticsearch data types. @@ -87,8 +89,24 @@ public static DataType commonType(DataType left, DataType right) { return right; } } - if (left == DATETIME) { + if (left == TIME) { if (right == DATE) { + return DATETIME; + } + if (DataTypes.isInterval(right)) { + return left; + } + } + if (right == TIME) { + if (left == DATE) { + return DATETIME; + } + if (DataTypes.isInterval(left)) { + return right; + } + } + if (left == DATETIME) { + if (right == DATE || right == TIME) { return left; } if (DataTypes.isInterval(right)) { @@ -96,7 +114,7 @@ public static DataType commonType(DataType left, DataType right) { } } if (right == DATETIME) { - if (left == DATE) { + if (left == DATE || left == TIME) { return right; } if (DataTypes.isInterval(left)) { @@ -144,7 +162,7 @@ public static Conversion conversionFor(DataType from, DataType to) { Conversion conversion = conversion(from, to); if (conversion == null) { - throw new SqlIllegalArgumentException("cannot convert from [" + from + "] to [" + to + "]"); + throw new SqlIllegalArgumentException("cannot convert from [" + from.typeName + "] to [" + to.typeName + "]"); } return conversion; } @@ -170,6 +188,8 @@ private static Conversion conversion(DataType from, DataType to) { return conversionToDouble(from); case DATE: return conversionToDate(from); + case TIME: + return conversionToTime(from); case DATETIME: return conversionToDateTime(from); case BOOLEAN: @@ -184,6 +204,9 @@ private static Conversion conversionToString(DataType from) { if (from == DATE) { return Conversion.DATE_TO_STRING; } + if (from == TIME) { + return Conversion.TIME_TO_STRING; + } if (from == DATETIME) { return Conversion.DATETIME_TO_STRING; } @@ -213,6 +236,9 @@ private static Conversion conversionToLong(DataType from) { if (from == DATE) { return Conversion.DATE_TO_LONG; } + if (from == TIME) { + return Conversion.TIME_TO_LONG; + } if (from == DATETIME) { return Conversion.DATETIME_TO_LONG; } @@ -235,6 +261,9 @@ private static Conversion conversionToInt(DataType from) { if (from == DATE) { return Conversion.DATE_TO_INT; } + if (from == TIME) { + return Conversion.TIME_TO_INT; + } if (from == DATETIME) { return Conversion.DATETIME_TO_INT; } @@ -257,6 +286,9 @@ private static Conversion conversionToShort(DataType from) { if (from == DATE) { return Conversion.DATE_TO_SHORT; } + if (from == TIME) { + return Conversion.TIME_TO_SHORT; + } if (from == DATETIME) { return Conversion.DATETIME_TO_SHORT; } @@ -279,6 +311,9 @@ private static 
Conversion conversionToByte(DataType from) { if (from == DATE) { return Conversion.DATE_TO_BYTE; } + if (from == TIME) { + return Conversion.TIME_TO_BYTE; + } if (from == DATETIME) { return Conversion.DATETIME_TO_BYTE; } @@ -301,6 +336,9 @@ private static Conversion conversionToFloat(DataType from) { if (from == DATE) { return Conversion.DATE_TO_FLOAT; } + if (from == TIME) { + return Conversion.TIME_TO_FLOAT; + } if (from == DATETIME) { return Conversion.DATETIME_TO_FLOAT; } @@ -323,6 +361,9 @@ private static Conversion conversionToDouble(DataType from) { if (from == DATE) { return Conversion.DATE_TO_DOUBLE; } + if (from == TIME) { + return Conversion.TIME_TO_DOUBLE; + } if (from == DATETIME) { return Conversion.DATETIME_TO_DOUBLE; } @@ -348,6 +389,28 @@ private static Conversion conversionToDate(DataType from) { return null; } + private static Conversion conversionToTime(DataType from) { + if (from.isRational()) { + return Conversion.RATIONAL_TO_TIME; + } + if (from.isInteger()) { + return Conversion.INTEGER_TO_TIME; + } + if (from == BOOLEAN) { + return Conversion.BOOL_TO_TIME; // We emit an int here which is ok because of Java's casting rules + } + if (from.isString()) { + return Conversion.STRING_TO_TIME; + } + if (from == DATE) { + return Conversion.DATE_TO_TIME; + } + if (from == DATETIME) { + return Conversion.DATETIME_TO_TIME; + } + return null; + } + private static Conversion conversionToDateTime(DataType from) { if (from.isRational()) { return Conversion.RATIONAL_TO_DATETIME; @@ -377,6 +440,9 @@ private static Conversion conversionToBoolean(DataType from) { if (from == DATE) { return Conversion.DATE_TO_BOOLEAN; } + if (from == TIME) { + return Conversion.TIME_TO_BOOLEAN; + } if (from == DATETIME) { return Conversion.DATETIME_TO_BOOLEAN; } @@ -456,6 +522,7 @@ public enum Conversion { NULL(value -> null), DATE_TO_STRING(o -> DateUtils.toDateString((ZonedDateTime) o)), + TIME_TO_STRING(o -> DateUtils.toTimeString((OffsetTime) o)), DATETIME_TO_STRING(o -> DateUtils.toString((ZonedDateTime) o)), OTHER_TO_STRING(String::valueOf), @@ -463,6 +530,7 @@ public enum Conversion { INTEGER_TO_LONG(fromLong(value -> value)), STRING_TO_LONG(fromString(Long::valueOf, "long")), DATE_TO_LONG(fromDateTime(value -> value)), + TIME_TO_LONG(fromTime(value -> value)), DATETIME_TO_LONG(fromDateTime(value -> value)), RATIONAL_TO_INT(fromDouble(value -> safeToInt(safeToLong(value)))), @@ -470,6 +538,7 @@ public enum Conversion { BOOL_TO_INT(fromBool(value -> value ? 1 : 0)), STRING_TO_INT(fromString(Integer::valueOf, "integer")), DATE_TO_INT(fromDateTime(DataTypeConversion::safeToInt)), + TIME_TO_INT(fromTime(DataTypeConversion::safeToInt)), DATETIME_TO_INT(fromDateTime(DataTypeConversion::safeToInt)), RATIONAL_TO_SHORT(fromDouble(value -> safeToShort(safeToLong(value)))), @@ -477,6 +546,7 @@ public enum Conversion { BOOL_TO_SHORT(fromBool(value -> value ? (short) 1 : (short) 0)), STRING_TO_SHORT(fromString(Short::valueOf, "short")), DATE_TO_SHORT(fromDateTime(DataTypeConversion::safeToShort)), + TIME_TO_SHORT(fromTime(DataTypeConversion::safeToShort)), DATETIME_TO_SHORT(fromDateTime(DataTypeConversion::safeToShort)), RATIONAL_TO_BYTE(fromDouble(value -> safeToByte(safeToLong(value)))), @@ -484,6 +554,7 @@ public enum Conversion { BOOL_TO_BYTE(fromBool(value -> value ? 
(byte) 1 : (byte) 0)), STRING_TO_BYTE(fromString(Byte::valueOf, "byte")), DATE_TO_BYTE(fromDateTime(DataTypeConversion::safeToByte)), + TIME_TO_BYTE(fromTime(DataTypeConversion::safeToByte)), DATETIME_TO_BYTE(fromDateTime(DataTypeConversion::safeToByte)), // TODO floating point conversions are lossy but conversions to integer conversions are not. Are we ok with that? @@ -492,6 +563,7 @@ public enum Conversion { BOOL_TO_FLOAT(fromBool(value -> value ? 1f : 0f)), STRING_TO_FLOAT(fromString(Float::valueOf, "float")), DATE_TO_FLOAT(fromDateTime(value -> (float) value)), + TIME_TO_FLOAT(fromTime(value -> (float) value)), DATETIME_TO_FLOAT(fromDateTime(value -> (float) value)), RATIONAL_TO_DOUBLE(fromDouble(Double::valueOf)), @@ -499,6 +571,7 @@ public enum Conversion { BOOL_TO_DOUBLE(fromBool(value -> value ? 1d : 0d)), STRING_TO_DOUBLE(fromString(Double::valueOf, "double")), DATE_TO_DOUBLE(fromDateTime(Double::valueOf)), + TIME_TO_DOUBLE(fromTime(Double::valueOf)), DATETIME_TO_DOUBLE(fromDateTime(Double::valueOf)), RATIONAL_TO_DATE(toDate(RATIONAL_TO_LONG)), @@ -507,6 +580,13 @@ public enum Conversion { STRING_TO_DATE(fromString(DateUtils::asDateOnly, "date")), DATETIME_TO_DATE(fromDatetimeToDate()), + RATIONAL_TO_TIME(toTime(RATIONAL_TO_LONG)), + INTEGER_TO_TIME(toTime(INTEGER_TO_LONG)), + BOOL_TO_TIME(toTime(BOOL_TO_INT)), + STRING_TO_TIME(fromString(DateUtils::asTimeOnly, "time")), + DATE_TO_TIME(fromDatetimeToTime()), + DATETIME_TO_TIME(fromDatetimeToTime()), + RATIONAL_TO_DATETIME(toDateTime(RATIONAL_TO_LONG)), INTEGER_TO_DATETIME(toDateTime(INTEGER_TO_LONG)), BOOL_TO_DATETIME(toDateTime(BOOL_TO_INT)), @@ -516,6 +596,7 @@ public enum Conversion { NUMERIC_TO_BOOLEAN(fromLong(value -> value != 0)), STRING_TO_BOOLEAN(fromString(DataTypeConversion::convertToBoolean, "boolean")), DATE_TO_BOOLEAN(fromDateTime(value -> value != 0)), + TIME_TO_BOOLEAN(fromTime(value -> value != 0)), DATETIME_TO_BOOLEAN(fromDateTime(value -> value != 0)), BOOL_TO_LONG(fromBool(value -> value ? 
1L : 0L)), @@ -557,22 +638,34 @@ private static Function fromBool(Function conve return (Object l) -> converter.apply(((Boolean) l)); } - private static Function fromDateTime(Function converter) { - return l -> converter.apply(((ZonedDateTime) l).toInstant().toEpochMilli()); + private static Function fromTime(Function converter) { + return l -> converter.apply(((OffsetTime) l).atDate(DateUtils.EPOCH).toInstant().toEpochMilli()); } - private static Function toDateTime(Conversion conversion) { - return l -> DateUtils.asDateTime(((Number) conversion.convert(l)).longValue()); + private static Function fromDateTime(Function converter) { + return l -> converter.apply(((ZonedDateTime) l).toInstant().toEpochMilli()); } private static Function toDate(Conversion conversion) { return l -> DateUtils.asDateOnly(((Number) conversion.convert(l)).longValue()); } + private static Function toTime(Conversion conversion) { + return l -> DateUtils.asTimeOnly(((Number) conversion.convert(l)).longValue()); + } + + private static Function toDateTime(Conversion conversion) { + return l -> DateUtils.asDateTime(((Number) conversion.convert(l)).longValue()); + } + private static Function fromDatetimeToDate() { return l -> DateUtils.asDateOnly((ZonedDateTime) l); } + private static Function fromDatetimeToTime() { + return l -> ((ZonedDateTime) l).toOffsetDateTime().toOffsetTime(); + } + public Object convert(Object l) { if (l == null) { return null; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java index f8d657447923a..c74f639745289 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java @@ -8,6 +8,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.literal.Interval; +import java.time.OffsetTime; import java.time.ZonedDateTime; import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; @@ -27,6 +28,7 @@ import static org.elasticsearch.xpack.sql.type.DataType.LONG; import static org.elasticsearch.xpack.sql.type.DataType.NULL; import static org.elasticsearch.xpack.sql.type.DataType.SHORT; +import static org.elasticsearch.xpack.sql.type.DataType.TIME; import static org.elasticsearch.xpack.sql.type.DataType.UNSUPPORTED; import static org.elasticsearch.xpack.sql.type.DataType.fromTypeName; @@ -67,6 +69,9 @@ public static DataType fromJava(Object value) { if (value instanceof Short) { return SHORT; } + if (value instanceof OffsetTime) { + return TIME; + } if (value instanceof ZonedDateTime) { return DATETIME; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java index 38db3cbe131cf..45072f7f480b1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java @@ -12,6 +12,7 @@ import java.time.Instant; import java.time.LocalDate; +import java.time.OffsetTime; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; @@ -19,11 +20,15 @@ import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE; import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; +import static java.time.format.DateTimeFormatter.ISO_TIME; public final 
class DateUtils { public static final ZoneId UTC = ZoneId.of("Z"); public static final String DATE_PARSE_FORMAT = "epoch_millis"; + // LocalDate.EPOCH is not available in Java 8; it was introduced in later Java versions + public static final LocalDate EPOCH = LocalDate.of(1970, 1, 1); + public static final long DAY_IN_MILLIS = 60 * 60 * 24 * 1000L; private static final DateTimeFormatter DATE_TIME_ESCAPED_LITERAL_FORMATTER = new DateTimeFormatterBuilder() .append(ISO_LOCAL_DATE) @@ -33,8 +38,6 @@ public final class DateUtils { private static final DateFormatter UTC_DATE_TIME_FORMATTER = DateFormatter.forPattern("date_optional_time").withZone(UTC); - private static final long DAY_IN_MILLIS = 60 * 60 * 24 * 1000L; - private DateUtils() {} /** @@ -44,6 +47,24 @@ public static ZonedDateTime asDateOnly(long millis) { return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC).toLocalDate().atStartOfDay(UTC); } + /** + * Creates a time for the SQL TIME type from the millis since epoch. + */ + public static OffsetTime asTimeOnly(long millis) { + return OffsetTime.ofInstant(Instant.ofEpochMilli(millis % DAY_IN_MILLIS), UTC); + } + + /** + * Creates a time for the SQL TIME type from the millis since epoch, in the given time zone. + */ + public static OffsetTime asTimeOnly(long millis, ZoneId zoneId) { + return OffsetTime.ofInstant(Instant.ofEpochMilli(millis % DAY_IN_MILLIS), zoneId); + } + + public static OffsetTime asTimeAtZone(OffsetTime time, ZoneId zonedId) { + return time.atDate(DateUtils.EPOCH).atZoneSameInstant(zonedId).toOffsetDateTime().toOffsetTime(); + } + /** * Creates a datetime from the millis since epoch (thus the time-zone is UTC). */ @@ -69,6 +90,10 @@ public static ZonedDateTime asDateOnly(ZonedDateTime zdt) { return zdt.toLocalDate().atStartOfDay(zdt.getZone()); } + public static OffsetTime asTimeOnly(String timeFormat) { + return DateFormatters.from(ISO_TIME.parse(timeFormat)).toOffsetDateTime().toOffsetTime(); + } + /** * Parses the given string into a DateTime using UTC as a default timezone.
*/ @@ -88,6 +113,10 @@ public static String toDateString(ZonedDateTime date) { return date.format(ISO_LOCAL_DATE); } + public static String toTimeString(OffsetTime time) { + return time.format(ISO_LOCAL_TIME); + } + public static long minDayInterval(long l) { if (l < DAY_IN_MILLIS ) { return DAY_IN_MILLIS; diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index f628d1be868c3..56b911389447a 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -12,6 +12,9 @@ class org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime { class org.elasticsearch.xpack.sql.expression.literal.IntervalYearMonth { } +class java.time.OffsetTime { +} + class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalSqlScriptUtils { # @@ -107,6 +110,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS IntervalDayTime intervalDayTime(String, String) IntervalYearMonth intervalYearMonth(String, String) ZonedDateTime asDateTime(Object) + OffsetTime asTime(String) # # ASCII Functions diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 9d55d4aeec7b5..3b1e8da318ff2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -203,10 +203,42 @@ public void testExtractNonDateTime() { assertEquals("1:8: Invalid datetime field [ABS]. 
Use any datetime function.", error("SELECT EXTRACT(ABS FROM date) FROM test")); } + public void testValidDateTimeFunctionsOnTime() { + accept("SELECT HOUR_OF_DAY(CAST(date AS TIME)) FROM test"); + accept("SELECT MINUTE_OF_HOUR(CAST(date AS TIME)) FROM test"); + accept("SELECT MINUTE_OF_DAY(CAST(date AS TIME)) FROM test"); + accept("SELECT SECOND_OF_MINUTE(CAST(date AS TIME)) FROM test"); + } + + public void testInvalidDateTimeFunctionsOnTime() { + assertEquals("1:8: argument of [DAY_OF_YEAR(CAST(date AS TIME))] must be [date or datetime], " + + "found value [CAST(date AS TIME)] type [time]", + error("SELECT DAY_OF_YEAR(CAST(date AS TIME)) FROM test")); + } + + public void testGroupByOnTimeNotAllowed() { + assertEquals("1:36: Function [CAST(date AS TIME)] with data type [time] cannot be used for grouping", + error("SELECT count(*) FROM test GROUP BY CAST(date AS TIME)")); + } + + public void testGroupByOnTimeWrappedWithScalar() { + accept("SELECT count(*) FROM test GROUP BY MINUTE(CAST(date AS TIME))"); + } + + public void testHistogramOnTimeNotAllowed() { + assertEquals("1:8: first argument of [HISTOGRAM] must be [date, datetime or numeric], " + + "found value [CAST(date AS TIME)] type [time]", + error("SELECT HISTOGRAM(CAST(date AS TIME), INTERVAL 1 MONTH), COUNT(*) FROM test GROUP BY 1")); + } + public void testSubtractFromInterval() { assertEquals("1:8: Cannot subtract a datetime[CAST('2000-01-01' AS DATETIME)] " + "from an interval[INTERVAL 1 MONTH]; do you mean the reverse?", error("SELECT INTERVAL 1 MONTH - CAST('2000-01-01' AS DATETIME)")); + + assertEquals("1:8: Cannot subtract a time[CAST('12:23:56.789' AS TIME)] " + + "from an interval[INTERVAL 1 MONTH]; do you mean the reverse?", + error("SELECT INTERVAL 1 MONTH - CAST('12:23:56.789' AS TIME)")); } public void testMultipleColumns() { @@ -293,7 +325,7 @@ public void testStarOnNested() { } public void testGroupByOnInexact() { - assertEquals("1:36: Field of data type [text] cannot be used for grouping; " + + assertEquals("1:36: Field [text] of data type [text] cannot be used for grouping; " + "No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", error("SELECT COUNT(*) FROM test GROUP BY text")); } @@ -535,12 +567,18 @@ public void testInvalidTypeForFunction_WithFourArgs() { error("SELECT INSERT('text', 1, 2, 3)")); } - public void testInvalidTypeForRegexMatch() { + public void testInvalidTypeForLikeMatch() { assertEquals("1:26: [text LIKE 'foo'] cannot operate on field of data type [text]: " + "No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", error("SELECT * FROM test WHERE text LIKE 'foo'")); } + public void testInvalidTypeForRLikeMatch() { + assertEquals("1:26: [text RLIKE 'foo'] cannot operate on field of data type [text]: " + + "No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", + error("SELECT * FROM test WHERE text RLIKE 'foo'")); + } + public void testAllowCorrectFieldsInIncompatibleMappings() { assertNotNull(incompatibleAccept("SELECT languages FROM \"*\"")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java index 03f9c949d2992..6e248fb7794df 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java 
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java @@ -7,17 +7,20 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import java.io.IOException; +import java.time.OffsetTime; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; +import static org.hamcrest.Matchers.startsWith; public class DateTimeProcessorTests extends AbstractWireSerializingTestCase { public static DateTimeProcessor randomDateTimeProcessor() { - return new DateTimeProcessor(randomFrom(DateTimeExtractor.values()), UTC); + return new DateTimeProcessor(randomFrom(DateTimeExtractor.values()), randomZone()); } @Override @@ -31,12 +34,12 @@ protected Reader instanceReader() { } @Override - protected DateTimeProcessor mutateInstance(DateTimeProcessor instance) throws IOException { + protected DateTimeProcessor mutateInstance(DateTimeProcessor instance) { DateTimeExtractor replaced = randomValueOtherThan(instance.extractor(), () -> randomFrom(DateTimeExtractor.values())); - return new DateTimeProcessor(replaced, UTC); + return new DateTimeProcessor(replaced, randomZone()); } - public void testApply() { + public void testApply_withTimezoneUTC() { DateTimeProcessor proc = new DateTimeProcessor(DateTimeExtractor.YEAR, UTC); assertEquals(1970, proc.process(dateTime(0L))); assertEquals(2017, proc.process(dateTime(2017, 01, 02, 10, 10))); @@ -46,4 +49,21 @@ public void testApply() { assertEquals(2, proc.process(dateTime(2017, 01, 02, 10, 10))); assertEquals(31, proc.process(dateTime(2017, 01, 31, 10, 10))); } + + public void testApply_withTimezoneOtherThanUTC() { + ZoneId zoneId = ZoneId.of("Etc/GMT-10"); + DateTimeProcessor proc = new DateTimeProcessor(DateTimeExtractor.YEAR, zoneId); + assertEquals(2018, proc.process(dateTime(2017, 12, 31, 18, 10))); + + proc = new DateTimeProcessor(DateTimeExtractor.DAY_OF_MONTH, zoneId); + assertEquals(1, proc.process(dateTime(2017, 12, 31, 20, 30))); + } + + public void testFailOnTime() { + DateTimeProcessor proc = new DateTimeProcessor(DateTimeExtractor.YEAR, UTC); + SqlIllegalArgumentException e = expectThrows(SqlIllegalArgumentException.class, () -> { + proc.process(OffsetTime.now(UTC)); + }); + assertThat(e.getMessage(), startsWith("A [date], a [time] or a [datetime] is required; received ")); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java index 4323cce234c54..13215eb41aebc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java @@ -8,6 +8,8 @@ import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.OffsetTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; public class DateTimeTestUtils { @@ -25,4 +27,12 @@ public static ZonedDateTime dateTime(long millisSinceEpoch) { public static ZonedDateTime 
date(long millisSinceEpoch) { return DateUtils.asDateOnly(millisSinceEpoch); } + + public static OffsetTime time(long millisSinceEpoch) { + return DateUtils.asTimeOnly(millisSinceEpoch); + } + + public static OffsetTime time(int hour, int minute, int second, int nano) { + return OffsetTime.of(hour, minute, second, nano, ZoneOffset.UTC); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeProcessorTests.java new file mode 100644 index 0000000000000..65b2cde2d0a69 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/TimeProcessorTests.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; + +import java.time.ZoneId; + +import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.time; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; + +public class TimeProcessorTests extends AbstractWireSerializingTestCase { + + public static TimeProcessor randomTimeProcessor() { + return new TimeProcessor(randomFrom(DateTimeExtractor.values()), randomZone()); + } + + @Override + protected TimeProcessor createTestInstance() { + return randomTimeProcessor(); + } + + @Override + protected Reader instanceReader() { + return TimeProcessor::new; + } + + @Override + protected TimeProcessor mutateInstance(TimeProcessor instance) { + DateTimeExtractor replaced = randomValueOtherThan(instance.extractor(), () -> randomFrom(DateTimeExtractor.values())); + return new TimeProcessor(replaced, randomZone()); + } + + public void testApply_withTimeZoneUTC() { + TimeProcessor proc = new TimeProcessor(DateTimeExtractor.SECOND_OF_MINUTE, UTC); + assertEquals(0, proc.process(time(0L))); + assertEquals(2, proc.process(time(2345L))); + + proc = new TimeProcessor(DateTimeExtractor.MINUTE_OF_DAY, UTC); + assertEquals(0, proc.process(time(0L))); + assertEquals(620, proc.process(time(10, 20, 30, 123456789))); + + proc = new TimeProcessor(DateTimeExtractor.MINUTE_OF_HOUR, UTC); + assertEquals(0, proc.process(time(0L))); + assertEquals(20, proc.process(time(10, 20, 30, 123456789))); + + proc = new TimeProcessor(DateTimeExtractor.HOUR_OF_DAY, UTC); + assertEquals(0, proc.process(time(0L))); + assertEquals(10, proc.process(time(10, 20, 30, 123456789))); + } + + public void testApply_withTimeZoneOtherThanUTC() { + ZoneId zoneId = ZoneId.of("Etc/GMT-10"); + + TimeProcessor proc = new TimeProcessor(DateTimeExtractor.SECOND_OF_MINUTE, zoneId); + assertEquals(0, proc.process(time(0L))); + assertEquals(2, proc.process(time(2345L))); + + proc = new TimeProcessor(DateTimeExtractor.MINUTE_OF_DAY, zoneId); + assertEquals(600, proc.process(time(0L))); + assertEquals(1220, proc.process(time(10, 20, 30, 123456789))); + + proc = new TimeProcessor(DateTimeExtractor.MINUTE_OF_HOUR, zoneId); + assertEquals(0, proc.process(time(0L))); + 
assertEquals(20, proc.process(time(10, 20, 30, 123456789)));
+
+        proc = new TimeProcessor(DateTimeExtractor.HOUR_OF_DAY, zoneId);
+        assertEquals(10, proc.process(time(0L)));
+        assertEquals(20, proc.process(time(10, 20, 30, 123456789)));
+        assertEquals(4, proc.process(time(18, 20, 30, 123456789)));
+    }
+}
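Editor's note (illustration only, not part of the patch): two java.time behaviors these tests lean on are easy to misread. POSIX-style zone IDs invert the sign, so Etc/GMT-10 is actually UTC+10; and OffsetTime arithmetic with a Duration wraps around midnight, which is why the day-time interval tests in the next file can add 32 or 45 hours to a time. A self-contained sketch; the class name is illustrative:

    import java.time.Duration;
    import java.time.Instant;
    import java.time.OffsetTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class TimeZoneAndWrapSketch {
        public static void main(String[] args) {
            // POSIX convention: Etc/GMT-10 means UTC+10, not UTC-10
            ZoneId zone = ZoneId.of("Etc/GMT-10");
            System.out.println(zone.getRules().getOffset(Instant.EPOCH)); // +10:00

            // so midnight UTC is 10:00 at Etc/GMT-10, hence HOUR_OF_DAY == 10 above
            OffsetTime midnightUtc = OffsetTime.of(0, 0, 0, 0, ZoneOffset.UTC);
            System.out.println(midnightUtc.withOffsetSameInstant(ZoneOffset.ofHours(10))); // 10:00+10:00

            // OffsetTime carries no date, so adding a Duration wraps around midnight:
            // 23:00Z + 32h == 07:00Z (32h mod 24h == 8h)
            System.out.println(OffsetTime.of(23, 0, 0, 0, ZoneOffset.UTC).plus(Duration.ofHours(32))); // 07:00Z
        }
    }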
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java
index 696f999b0b051..1c4b0697f959e 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.xpack.sql.util.DateUtils;
 
 import java.time.Duration;
+import java.time.OffsetTime;
 import java.time.Period;
 import java.time.ZonedDateTime;
 import java.time.temporal.TemporalAmount;
@@ -104,6 +105,33 @@ public void testAddDayTimeIntervalToDateTimeReverse() {
         assertEquals(L(now.plus(t)), L(x));
     }
 
+    public void testAddYearMonthIntervalToTime() {
+        OffsetTime now = OffsetTime.now(DateUtils.UTC);
+        Literal l = L(now);
+        TemporalAmount t = Period.ofYears(100).plusMonths(50);
+        Literal r = interval(t, INTERVAL_HOUR);
+        OffsetTime x = add(l, r);
+        assertEquals(L(now), L(x));
+    }
+
+    public void testAddDayTimeIntervalToTime() {
+        OffsetTime now = OffsetTime.now(DateUtils.UTC);
+        Literal l = L(now);
+        TemporalAmount t = Duration.ofHours(32);
+        Literal r = interval(Duration.ofHours(32), INTERVAL_HOUR);
+        OffsetTime x = add(l, r);
+        assertEquals(L(now.plus(t)), L(x));
+    }
+
+    public void testAddDayTimeIntervalToTimeReverse() {
+        OffsetTime now = OffsetTime.now(DateUtils.UTC);
+        Literal l = L(now);
+        TemporalAmount t = Duration.ofHours(45);
+        Literal r = interval(Duration.ofHours(45), INTERVAL_HOUR);
+        OffsetTime x = add(r, l);
+        assertEquals(L(now.plus(t)), L(x));
+    }
+
     public void testAddNumberToIntervalIllegal() {
         Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR);
         SqlIllegalArgumentException expect = expectThrows(SqlIllegalArgumentException.class, () -> add(r, L(1)));
@@ -142,12 +170,6 @@ public void testSubYearMonthIntervalToDateTimeIllegal() {
         assertEquals("Cannot subtract a date from an interval; do you mean the reverse?", ex.getMessage());
     }
 
-    public void testSubNumberFromIntervalIllegal() {
-        Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR);
-        SqlIllegalArgumentException expect = expectThrows(SqlIllegalArgumentException.class, () -> sub(r, L(1)));
-        assertEquals("Cannot compute [-] between [IntervalDayTime] [Integer]", expect.getMessage());
-    }
-
     public void testSubDayTimeIntervalToDateTime() {
         ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC);
         Literal l = L(now);
@@ -157,7 +179,40 @@
         assertEquals(L(now.minus(t)), L(x));
     }
 
-    public void testMulIntervalNumber() throws Exception {
+    public void testSubYearMonthIntervalToTime() {
+        OffsetTime now = OffsetTime.now(DateUtils.UTC);
+        Literal l = L(now);
+        TemporalAmount t = Period.ofYears(100).plusMonths(50);
+        Literal r = interval(t, INTERVAL_HOUR);
+        OffsetTime x = sub(l, r);
+        assertEquals(L(now), L(x));
+    }
+
+    public void testSubYearMonthIntervalToTimeIllegal() {
+        OffsetTime now = OffsetTime.now(DateUtils.UTC);
+        Literal l = L(now);
+        TemporalAmount t = Period.ofYears(100).plusMonths(50);
+        Literal r = interval(t, INTERVAL_HOUR);
+        SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> sub(r, l));
+        assertEquals("Cannot subtract a date from an interval; do you mean the reverse?", ex.getMessage());
+    }
+
+    public void testSubDayTimeIntervalToTime() {
+        OffsetTime now = OffsetTime.now(DateUtils.UTC);
+        Literal l = L(now);
+        TemporalAmount t = Duration.ofHours(36);
+        Literal r = interval(Duration.ofHours(36), INTERVAL_HOUR);
+        OffsetTime x = sub(l, r);
+        assertEquals(L(now.minus(t)), L(x));
+    }
+
+    public void testSubNumberFromIntervalIllegal() {
+        Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR);
+        SqlIllegalArgumentException expect = expectThrows(SqlIllegalArgumentException.class, () -> sub(r, L(1)));
+        assertEquals("Cannot compute [-] between [IntervalDayTime] [Integer]", expect.getMessage());
+    }
+
+    public void testMulIntervalNumber() {
         Literal l = interval(Duration.ofHours(2), INTERVAL_HOUR);
         IntervalDayTime interval = mul(l, -1);
         assertEquals(INTERVAL_HOUR, interval.dataType());
@@ -165,7 +220,7 @@ public void testMulIntervalNumber() throws Exception {
         assertEquals(Duration.ofHours(2).negated(), p);
     }
 
-    public void testMulNumberInterval() throws Exception {
+    public void testMulNumberInterval() {
         Literal r = interval(Period.ofYears(1), INTERVAL_YEAR);
         IntervalYearMonth interval = mul(-2, r);
         assertEquals(INTERVAL_YEAR, interval.dataType());
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
index bc3ea049c242e..6fd4611a43416 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.sql.parser;
 
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Literal;
 import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
@@ -180,9 +179,9 @@ public void testDateLiteralValidation() {
             ex.getMessage());
     }
 
-    public void testTimeLiteralUnsupported() {
-        SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> timeLiteral("10:10:10"));
-        assertThat(ex.getMessage(), is("Time (only) literals are not supported; a date component is required as well"));
+    public void testTimeLiteral() {
+        Literal l = timeLiteral("12:23:56");
+        assertThat(l.dataType(), is(DataType.TIME));
     }
 
     public void testTimeLiteralValidation() {
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java
index c6c993967dd1b..110c320d679e4 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java
@@ -57,11 +57,11 @@ private Tuple sql(String sql) {
         return new Tuple<>(cmd, session);
     }
 
-    public void testSysTypes() throws Exception {
+    public void testSysTypes() {
         Command cmd = sql("SYS TYPES").v1();
         List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT",
"FLOAT", "DOUBLE", "SCALED_FLOAT", - "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", "DATETIME", + "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", "TIME", "DATETIME", "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", @@ -86,11 +86,11 @@ public void testSysTypes() throws Exception { }, ex -> fail(ex.getMessage()))); } - public void testSysColsNoArgs() throws Exception { + public void testSysColsNoArgs() { runSysColumns("SYS COLUMNS"); } - public void testSysColumnEmptyCatalog() throws Exception { + public void testSysColumnEmptyCatalog() { Tuple sql = sql("SYS COLUMNS CATALOG '' TABLE LIKE '%' LIKE '%'"); sql.v1().execute(sql.v2(), ActionListener.wrap(r -> { @@ -99,7 +99,7 @@ public void testSysColumnEmptyCatalog() throws Exception { }, ex -> fail(ex.getMessage()))); } - public void testSysColsTableOnlyCatalog() throws Exception { + public void testSysColsTableOnlyCatalog() { Tuple sql = sql("SYS COLUMNS CATALOG 'catalog'"); sql.v1().execute(sql.v2(), ActionListener.wrap(r -> { @@ -108,20 +108,20 @@ public void testSysColsTableOnlyCatalog() throws Exception { }, ex -> fail(ex.getMessage()))); } - public void testSysColsTableOnlyPattern() throws Exception { + public void testSysColsTableOnlyPattern() { runSysColumns("SYS COLUMNS TABLE LIKE 'test'"); } - public void testSysColsColOnlyPattern() throws Exception { + public void testSysColsColOnlyPattern() { runSysColumns("SYS COLUMNS LIKE '%'"); } - public void testSysColsTableAndColsPattern() throws Exception { + public void testSysColsTableAndColsPattern() { runSysColumns("SYS COLUMNS TABLE LIKE 'test' LIKE '%'"); } - private void runSysColumns(String commandVariation) throws Exception { + private void runSysColumns(String commandVariation) { Tuple sql = sql(commandVariation); List names = asList("bool", "int", diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java index 9c1ef31fcb170..4a8da68a1d51e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java @@ -44,7 +44,7 @@ public void testSysTypes() { Command cmd = sql("SYS TYPES").v1(); List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "FLOAT", "DOUBLE", "SCALED_FLOAT", - "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", "DATETIME", + "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", "TIME", "DATETIME", "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index a39b5466bc10f..c76e0da987d55 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -41,11 +41,12 @@ import org.elasticsearch.xpack.sql.querydsl.query.ExistsQuery; import org.elasticsearch.xpack.sql.querydsl.query.NotQuery; import org.elasticsearch.xpack.sql.querydsl.query.Query; -import org.elasticsearch.xpack.sql.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery; +import org.elasticsearch.xpack.sql.querydsl.query.RegexQuery; import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery; +import org.elasticsearch.xpack.sql.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; @@ -186,20 +187,41 @@ public void testLikeOnInexact() { assertTrue(p instanceof Filter); Expression condition = ((Filter) p).condition(); QueryTranslation qt = QueryTranslator.toQuery(condition, false); - assertEquals(QueryStringQuery.class, qt.query.getClass()); - QueryStringQuery qsq = ((QueryStringQuery) qt.query); - assertEquals(1, qsq.fields().size()); - assertEquals("some.string.typical", qsq.fields().keySet().iterator().next()); + assertEquals(WildcardQuery.class, qt.query.getClass()); + WildcardQuery qsq = ((WildcardQuery) qt.query); + assertEquals("some.string.typical", qsq.field()); + } + + public void testRLikeOnInexact() { + LogicalPlan p = plan("SELECT * FROM test WHERE some.string RLIKE '.*a.*'"); + assertTrue(p instanceof Project); + p = ((Project) p).child(); + assertTrue(p instanceof Filter); + Expression condition = ((Filter) p).condition(); + QueryTranslation qt = QueryTranslator.toQuery(condition, false); + assertEquals(RegexQuery.class, qt.query.getClass()); + RegexQuery qsq = ((RegexQuery) qt.query); + assertEquals("some.string.typical", qsq.field()); } public void testLikeConstructsNotSupported() { - LogicalPlan p = plan("SELECT LTRIM(keyword) lt FROM test WHERE LTRIM(keyword) LIKE '%a%'"); + LogicalPlan p = plan("SELECT LTRIM(keyword) lt FROM test WHERE LTRIM(keyword) like '%a%'"); assertTrue(p instanceof Project); p = ((Project) p).child(); assertTrue(p instanceof Filter); Expression condition = ((Filter) p).condition(); SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> QueryTranslator.toQuery(condition, false)); - assertEquals("Scalar function (LTRIM(keyword)) not allowed (yet) as arguments for LIKE", ex.getMessage()); + assertEquals("Scalar function [LTRIM(keyword)] not allowed (yet) as argument for LIKE", ex.getMessage()); + } + + public void testRLikeConstructsNotSupported() { + LogicalPlan p = plan("SELECT LTRIM(keyword) lt FROM test WHERE LTRIM(keyword) RLIKE '.*a.*'"); + assertTrue(p instanceof Project); + p = ((Project) p).child(); + assertTrue(p instanceof Filter); + Expression condition = ((Filter) p).condition(); + SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> QueryTranslator.toQuery(condition, false)); + assertEquals("Scalar function [LTRIM(keyword)] not allowed (yet) as argument for RLIKE", ex.getMessage()); } public void testDifferentLikeAndNotLikePatterns() { @@ -213,20 +235,18 @@ public void testDifferentLikeAndNotLikePatterns() { assertEquals(BoolQuery.class, qt.query.getClass()); BoolQuery bq = ((BoolQuery) qt.query); assertTrue(bq.isAnd()); - assertTrue(bq.left() instanceof QueryStringQuery); + 
assertTrue(bq.left() instanceof WildcardQuery); assertTrue(bq.right() instanceof NotQuery); NotQuery nq = (NotQuery) bq.right(); - assertTrue(nq.child() instanceof QueryStringQuery); - QueryStringQuery lqsq = (QueryStringQuery) bq.left(); - QueryStringQuery rqsq = (QueryStringQuery) nq.child(); + assertTrue(nq.child() instanceof WildcardQuery); + WildcardQuery lqsq = (WildcardQuery) bq.left(); + WildcardQuery rqsq = (WildcardQuery) nq.child(); assertEquals("X*", lqsq.query()); - assertEquals(1, lqsq.fields().size()); - assertEquals("keyword", lqsq.fields().keySet().iterator().next()); + assertEquals("keyword", lqsq.field()); assertEquals("Y*", rqsq.query()); - assertEquals(1, rqsq.fields().size()); - assertEquals("keyword", rqsq.fields().keySet().iterator().next()); + assertEquals("keyword", rqsq.field()); } public void testRLikePatterns() { @@ -248,20 +268,18 @@ private void assertDifferentRLikeAndNotRLikePatterns(String firstPattern, String assertEquals(BoolQuery.class, qt.query.getClass()); BoolQuery bq = ((BoolQuery) qt.query); assertTrue(bq.isAnd()); - assertTrue(bq.left() instanceof QueryStringQuery); + assertTrue(bq.left() instanceof RegexQuery); assertTrue(bq.right() instanceof NotQuery); NotQuery nq = (NotQuery) bq.right(); - assertTrue(nq.child() instanceof QueryStringQuery); - QueryStringQuery lqsq = (QueryStringQuery) bq.left(); - QueryStringQuery rqsq = (QueryStringQuery) nq.child(); + assertTrue(nq.child() instanceof RegexQuery); + RegexQuery lqsq = (RegexQuery) bq.left(); + RegexQuery rqsq = (RegexQuery) nq.child(); - assertEquals("/" + firstPattern + "/", lqsq.query()); - assertEquals(1, lqsq.fields().size()); - assertEquals("keyword", lqsq.fields().keySet().iterator().next()); - assertEquals("/" + secondPattern + "/", rqsq.query()); - assertEquals(1, rqsq.fields().size()); - assertEquals("keyword", rqsq.fields().keySet().iterator().next()); + assertEquals(firstPattern, lqsq.regex()); + assertEquals("keyword", lqsq.field()); + assertEquals(secondPattern, rqsq.regex()); + assertEquals("keyword", rqsq.field()); } public void testTranslateNotExpression_WhereClause_Painless() { @@ -644,7 +662,7 @@ public void testGroupByHistogramWithDateTruncateIntervalToDayMultiples() { assertEquals(259200000L, ((GroupByDateHistogram) eqe.queryContainer().aggs().groups().get(0)).interval()); } } - + public void testCountAndCountDistinctFolding() { PhysicalPlan p = optimizeAndPlan("SELECT COUNT(DISTINCT keyword) dkey, COUNT(keyword) key FROM test"); assertEquals(EsQueryExec.class, p.getClass()); @@ -789,57 +807,56 @@ public void testTopHitsAggregationWithTwoArgs() { } } - - public void testGlobalCountInImplicitGroupByForcesTrackHits() throws Exception { + public void testGlobalCountInImplicitGroupByForcesTrackHits() { PhysicalPlan p = optimizeAndPlan("SELECT COUNT(*) FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertTrue("Should be tracking hits", eqe.queryContainer().shouldTrackHits()); } - public void testGlobalCountAllInImplicitGroupByForcesTrackHits() throws Exception { + public void testGlobalCountAllInImplicitGroupByForcesTrackHits() { PhysicalPlan p = optimizeAndPlan("SELECT COUNT(ALL *) FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertTrue("Should be tracking hits", eqe.queryContainer().shouldTrackHits()); } - public void testGlobalCountInSpecificGroupByDoesNotForceTrackHits() throws Exception { + public void testGlobalCountInSpecificGroupByDoesNotForceTrackHits() { PhysicalPlan p = 
optimizeAndPlan("SELECT COUNT(*) FROM test GROUP BY int"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertFalse("Should NOT be tracking hits", eqe.queryContainer().shouldTrackHits()); } - public void testFieldAllCountDoesNotTrackHits() throws Exception { + public void testFieldAllCountDoesNotTrackHits() { PhysicalPlan p = optimizeAndPlan("SELECT COUNT(ALL int) FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertFalse("Should NOT be tracking hits", eqe.queryContainer().shouldTrackHits()); } - public void testFieldCountDoesNotTrackHits() throws Exception { + public void testFieldCountDoesNotTrackHits() { PhysicalPlan p = optimizeAndPlan("SELECT COUNT(int) FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertFalse("Should NOT be tracking hits", eqe.queryContainer().shouldTrackHits()); } - public void testDistinctCountDoesNotTrackHits() throws Exception { + public void testDistinctCountDoesNotTrackHits() { PhysicalPlan p = optimizeAndPlan("SELECT COUNT(DISTINCT int) FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertFalse("Should NOT be tracking hits", eqe.queryContainer().shouldTrackHits()); } - public void testNoCountDoesNotTrackHits() throws Exception { + public void testNoCountDoesNotTrackHits() { PhysicalPlan p = optimizeAndPlan("SELECT int FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertFalse("Should NOT be tracking hits", eqe.queryContainer().shouldTrackHits()); } - public void testZonedDateTimeInScripts() throws Exception { + public void testZonedDateTimeInScripts() { PhysicalPlan p = optimizeAndPlan( "SELECT date FROM test WHERE date + INTERVAL 1 YEAR > CAST('2019-03-11T12:34:56.000Z' AS DATETIME)"); assertEquals(EsQueryExec.class, p.getClass()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index a72f9ee7f1244..d44f69393f12b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -11,14 +11,16 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion; -import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.OffsetTime; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.date; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.time; import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.sql.type.DataType.BYTE; import static org.elasticsearch.xpack.sql.type.DataType.DATE; @@ -38,12 +40,15 @@ import static org.elasticsearch.xpack.sql.type.DataType.NULL; import static org.elasticsearch.xpack.sql.type.DataType.SHORT; import static org.elasticsearch.xpack.sql.type.DataType.TEXT; +import static org.elasticsearch.xpack.sql.type.DataType.TIME; import static 
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java
index a72f9ee7f1244..d44f69393f12b 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java
@@ -11,14 +11,16 @@
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion;
-import org.elasticsearch.xpack.sql.util.DateUtils;
 
+import java.time.OffsetTime;
+import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.date;
 import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime;
+import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.time;
 import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN;
 import static org.elasticsearch.xpack.sql.type.DataType.BYTE;
 import static org.elasticsearch.xpack.sql.type.DataType.DATE;
@@ -38,12 +40,15 @@
 import static org.elasticsearch.xpack.sql.type.DataType.NULL;
 import static org.elasticsearch.xpack.sql.type.DataType.SHORT;
 import static org.elasticsearch.xpack.sql.type.DataType.TEXT;
+import static org.elasticsearch.xpack.sql.type.DataType.TIME;
 import static org.elasticsearch.xpack.sql.type.DataType.UNSUPPORTED;
 import static org.elasticsearch.xpack.sql.type.DataType.fromTypeName;
 import static org.elasticsearch.xpack.sql.type.DataType.values;
 import static org.elasticsearch.xpack.sql.type.DataTypeConversion.commonType;
 import static org.elasticsearch.xpack.sql.type.DataTypeConversion.conversionFor;
+import static org.elasticsearch.xpack.sql.util.DateUtils.asDateOnly;
 import static org.elasticsearch.xpack.sql.util.DateUtils.asDateTime;
+import static org.elasticsearch.xpack.sql.util.DateUtils.asTimeOnly;
 
 public class DataTypeConversionTests extends ESTestCase {
@@ -58,8 +63,16 @@ public void testConversionToString() {
         {
             Conversion conversion = conversionFor(DATE, to);
             assertNull(conversion.convert(null));
-            assertEquals("1973-11-29", conversion.convert(DateUtils.asDateOnly(123456789101L)));
-            assertEquals("1966-02-02", conversion.convert(DateUtils.asDateOnly(-123456789101L)));
+            assertEquals("1973-11-29", conversion.convert(asDateOnly(123456789101L)));
+            assertEquals("1966-02-02", conversion.convert(asDateOnly(-123456789101L)));
+        }
+        {
+            Conversion conversion = conversionFor(TIME, to);
+            assertNull(conversion.convert(null));
+            assertEquals("00:02:03.456", conversion.convert(asTimeOnly(123456L)));
+            assertEquals("21:33:09.101", conversion.convert(asTimeOnly(123456789101L)));
+            assertEquals("23:57:56.544", conversion.convert(asTimeOnly(-123456L)));
+            assertEquals("02:26:50.899", conversion.convert(asTimeOnly(-123456789101L)));
         }
         {
             Conversion conversion = conversionFor(DATETIME, to);
@@ -98,8 +111,16 @@ public void testConversionToLong() {
         {
             Conversion conversion = conversionFor(DATE, to);
             assertNull(conversion.convert(null));
-            assertEquals(123379200000L, conversion.convert(DateUtils.asDateOnly(123456789101L)));
-            assertEquals(-123465600000L, conversion.convert(DateUtils.asDateOnly(-123456789101L)));
+            assertEquals(123379200000L, conversion.convert(asDateOnly(123456789101L)));
+            assertEquals(-123465600000L, conversion.convert(asDateOnly(-123456789101L)));
+        }
+        {
+            Conversion conversion = conversionFor(TIME, to);
+            assertNull(conversion.convert(null));
+            assertEquals(123456L, conversion.convert(asTimeOnly(123456L)));
+            assertEquals(77589101L, conversion.convert(asTimeOnly(123456789101L)));
+            assertEquals(86276544L, conversion.convert(asTimeOnly(-123456L)));
+            assertEquals(8810899L, conversion.convert(asTimeOnly(-123456789101L)));
         }
         {
             Conversion conversion = conversionFor(DATETIME, to);
@@ -140,6 +161,10 @@ public void testConversionToDate() {
             assertEquals(date(1), conversion.convert(true));
             assertEquals(date(0), conversion.convert(false));
         }
+        {
+            Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversionFor(TIME, to));
+            assertEquals("cannot convert from [time] to [date]", e.getMessage());
+        }
         {
             Conversion conversion = conversionFor(DATETIME, to);
             assertNull(conversion.convert(null));
@@ -160,12 +185,67 @@
             ZonedDateTime zdt = org.elasticsearch.common.time.DateUtils.nowWithMillisResolution();
             Conversion forward = conversionFor(DATE, KEYWORD);
             Conversion back = conversionFor(KEYWORD, DATE);
-            assertEquals(DateUtils.asDateOnly(zdt), back.convert(forward.convert(zdt)));
+            assertEquals(asDateOnly(zdt), back.convert(forward.convert(zdt)));
             Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff"));
             assertEquals("cannot cast [0xff] to [date]: Text '0xff' could not be parsed at index 0", e.getMessage());
         }
     }
 
+    public void testConversionToTime() {
+        DataType
to = TIME; + { + Conversion conversion = conversionFor(DOUBLE, to); + assertNull(conversion.convert(null)); + assertEquals(time(10L), conversion.convert(10.0)); + assertEquals(time(10L), conversion.convert(10.1)); + assertEquals(time(11L), conversion.convert(10.6)); + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(Double.MAX_VALUE)); + assertEquals("[" + Double.MAX_VALUE + "] out of [long] range", e.getMessage()); + } + { + Conversion conversion = conversionFor(INTEGER, to); + assertNull(conversion.convert(null)); + assertEquals(time(10L), conversion.convert(10)); + assertEquals(time(-134L), conversion.convert(-134)); + } + { + Conversion conversion = conversionFor(BOOLEAN, to); + assertNull(conversion.convert(null)); + assertEquals(time(1), conversion.convert(true)); + assertEquals(time(0), conversion.convert(false)); + } + { + Conversion conversion = conversionFor(DATE, to); + assertNull(conversion.convert(null)); + assertEquals(time(123379200000L), conversion.convert(asDateOnly(123456789101L))); + assertEquals(time(-123465600000L), conversion.convert(asDateOnly(-123456789101L))); + } + { + Conversion conversion = conversionFor(DATETIME, to); + assertNull(conversion.convert(null)); + assertEquals(time(77589101L), conversion.convert(asDateTime(123456789101L))); + assertEquals(time(8810899L), conversion.convert(asDateTime(-123456789101L))); + } + { + Conversion conversion = conversionFor(KEYWORD, to); + assertNull(conversion.convert(null)); + + assertEquals(time(0L), conversion.convert("00:00:00Z")); + assertEquals(time(1000L), conversion.convert("00:00:01Z")); + assertEquals(time(1234L), conversion.convert("00:00:01.234Z")); + assertEquals(time(63296789L).withOffsetSameInstant(ZoneOffset.ofHours(-5)), conversion.convert("12:34:56.789-05:00")); + + // double check back and forth conversion + OffsetTime ot = org.elasticsearch.common.time.DateUtils.nowWithMillisResolution().toOffsetDateTime().toOffsetTime(); + Conversion forward = conversionFor(TIME, KEYWORD); + Conversion back = conversionFor(KEYWORD, TIME); + assertEquals(ot, back.convert(forward.convert(ot))); + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff")); + assertEquals("cannot cast [0xff] to [time]: Text '0xff' could not be parsed at index 0", + e.getMessage()); + } + } + public void testConversionToDateTime() { DataType to = DATETIME; { @@ -192,8 +272,12 @@ public void testConversionToDateTime() { { Conversion conversion = conversionFor(DATE, to); assertNull(conversion.convert(null)); - assertEquals(dateTime(123379200000L), conversion.convert(DateUtils.asDateOnly(123456789101L))); - assertEquals(dateTime(-123465600000L), conversion.convert(DateUtils.asDateOnly(-123456789101L))); + assertEquals(dateTime(123379200000L), conversion.convert(asDateOnly(123456789101L))); + assertEquals(dateTime(-123465600000L), conversion.convert(asDateOnly(-123456789101L))); + } + { + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversionFor(TIME, to)); + assertEquals("cannot convert from [time] to [datetime]", e.getMessage()); } { Conversion conversion = conversionFor(KEYWORD, to); @@ -217,6 +301,58 @@ public void testConversionToDateTime() { } } + public void testConversionToFloat() { + DataType to = FLOAT; + { + Conversion conversion = conversionFor(DOUBLE, to); + assertNull(conversion.convert(null)); + assertEquals(10.0f, (float) conversion.convert(10.0d), 0.00001); + assertEquals(10.1f, (float) conversion.convert(10.1d), 0.00001); + 
assertEquals(10.6f, (float) conversion.convert(10.6d), 0.00001); + } + { + Conversion conversion = conversionFor(INTEGER, to); + assertNull(conversion.convert(null)); + assertEquals(10.0f, (float) conversion.convert(10), 0.00001); + assertEquals(-134.0f, (float) conversion.convert(-134), 0.00001); + } + { + Conversion conversion = conversionFor(BOOLEAN, to); + assertNull(conversion.convert(null)); + assertEquals(1.0f, (float) conversion.convert(true), 0); + assertEquals(0.0f, (float) conversion.convert(false), 0); + } + { + Conversion conversion = conversionFor(DATE, to); + assertNull(conversion.convert(null)); + assertEquals(1.233792E11f, (float) conversion.convert(asDateOnly(123456789101L)), 0); + assertEquals(-1.234656E11f, (float) conversion.convert(asDateOnly(-123456789101L)), 0); + } + { + Conversion conversion = conversionFor(TIME, to); + assertNull(conversion.convert(null)); + assertEquals(123456.0f, (float) conversion.convert(asTimeOnly(123456L)), 0); + assertEquals(7.7589104E7f, (float) conversion.convert(asTimeOnly(123456789101L)), 0); + assertEquals(8.6276544E7f, (float) conversion.convert(asTimeOnly(-123456L)), 0); + assertEquals(8810899.0f, (float) conversion.convert(asTimeOnly(-123456789101L)), 0); + } + { + Conversion conversion = conversionFor(DATETIME, to); + assertNull(conversion.convert(null)); + assertEquals(1.23456789101E11f, (float) conversion.convert(asDateTime(123456789101L)), 0); + assertEquals(-1.23456789101E11f, (float) conversion.convert(asDateTime(-123456789101L)), 0); + } + { + Conversion conversion = conversionFor(KEYWORD, to); + assertNull(conversion.convert(null)); + assertEquals(1.0f, (float) conversion.convert("1"), 0); + assertEquals(0.0f, (float) conversion.convert("-0"), 0); + assertEquals(12.776f, (float) conversion.convert("12.776"), 0.00001); + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff")); + assertEquals("cannot cast [0xff] to [float]", e.getMessage()); + } + } + public void testConversionToDouble() { DataType to = DOUBLE; { @@ -241,8 +377,16 @@ public void testConversionToDouble() { { Conversion conversion = conversionFor(DATE, to); assertNull(conversion.convert(null)); - assertEquals(1.233792E11, (double) conversion.convert(DateUtils.asDateOnly(123456789101L)), 0); - assertEquals(-1.234656E11, (double) conversion.convert(DateUtils.asDateOnly(-123456789101L)), 0); + assertEquals(1.233792E11, (double) conversion.convert(asDateOnly(123456789101L)), 0); + assertEquals(-1.234656E11, (double) conversion.convert(asDateOnly(-123456789101L)), 0); + } + { + Conversion conversion = conversionFor(TIME, to); + assertNull(conversion.convert(null)); + assertEquals(123456.0, (double) conversion.convert(asTimeOnly(123456L)), 0); + assertEquals(7.7589101E7, (double) conversion.convert(asTimeOnly(123456789101L)), 0); + assertEquals(8.6276544E7, (double) conversion.convert(asTimeOnly(-123456L)), 0); + assertEquals(8810899.0, (double) conversion.convert(asTimeOnly(-123456789101L)), 0); } { Conversion conversion = conversionFor(DATETIME, to); @@ -294,9 +438,16 @@ public void testConversionToBoolean() { { Conversion conversion = conversionFor(DATE, to); assertNull(conversion.convert(null)); - assertEquals(true, conversion.convert(DateUtils.asDateOnly(123456789101L))); - assertEquals(true, conversion.convert(DateUtils.asDateOnly(-123456789101L))); - assertEquals(false, conversion.convert(DateUtils.asDateOnly(0L))); + assertEquals(true, conversion.convert(asDateOnly(123456789101L))); + assertEquals(true, 
conversion.convert(asDateOnly(-123456789101L))); + assertEquals(false, conversion.convert(asDateOnly(0L))); + } + { + Conversion conversion = conversionFor(TIME, to); + assertNull(conversion.convert(null)); + assertEquals(true, conversion.convert(asTimeOnly(123456789101L))); + assertEquals(true, conversion.convert(asTimeOnly(-123456789101L))); + assertEquals(false, conversion.convert(asTimeOnly(0L))); } { Conversion conversion = conversionFor(DATETIME, to); @@ -343,20 +494,29 @@ public void testConversionToInt() { { Conversion conversion = conversionFor(DATE, to); assertNull(conversion.convert(null)); - assertEquals(0, conversion.convert(DateUtils.asDateOnly(12345678L))); - assertEquals(86400000, conversion.convert(DateUtils.asDateOnly(123456789L))); - assertEquals(172800000, conversion.convert(DateUtils.asDateOnly(223456789L))); - assertEquals(-172800000, conversion.convert(DateUtils.asDateOnly(-123456789L))); - Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(DateUtils.asDateOnly(Long.MAX_VALUE))); + assertEquals(0, conversion.convert(asDateOnly(12345678L))); + assertEquals(86400000, conversion.convert(asDateOnly(123456789L))); + assertEquals(172800000, conversion.convert(asDateOnly(223456789L))); + assertEquals(-172800000, conversion.convert(asDateOnly(-123456789L))); + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(Long.MAX_VALUE))); assertEquals("[9223372036828800000] out of [integer] range", e.getMessage()); } + { + Conversion conversion = conversionFor(TIME, to); + assertNull(conversion.convert(null)); + assertEquals(123456, conversion.convert(asTimeOnly(123456L))); + assertEquals(77589101, conversion.convert(asTimeOnly(123456789101L))); + assertEquals(86276544, conversion.convert(asTimeOnly(-123456L))); + assertEquals(8810899, conversion.convert(asTimeOnly(-123456789101L))); + assertEquals(25975807, conversion.convert(asTimeOnly(Long.MAX_VALUE))); + } { Conversion conversion = conversionFor(DATETIME, to); assertNull(conversion.convert(null)); - assertEquals(12345678, conversion.convert(DateUtils.asDateTime(12345678L))); - assertEquals(223456789, conversion.convert(DateUtils.asDateTime(223456789L))); - assertEquals(-123456789, conversion.convert(DateUtils.asDateTime(-123456789L))); - Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(DateUtils.asDateTime(Long.MAX_VALUE))); + assertEquals(12345678, conversion.convert(asDateTime(12345678L))); + assertEquals(223456789, conversion.convert(asDateTime(223456789L))); + assertEquals(-123456789, conversion.convert(asDateTime(-123456789L))); + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asDateTime(Long.MAX_VALUE))); assertEquals("[" + Long.MAX_VALUE + "] out of [integer] range", e.getMessage()); } } @@ -375,17 +535,26 @@ public void testConversionToShort() { { Conversion conversion = conversionFor(DATE, to); assertNull(conversion.convert(null)); - assertEquals((short) 0, conversion.convert(DateUtils.asDateOnly(12345678L))); - Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(DateUtils.asDateOnly(123456789L))); + assertEquals((short) 0, conversion.convert(asDateOnly(12345678L))); + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); assertEquals("[86400000] out of [short] range", e.getMessage()); } + { + Conversion conversion = conversionFor(TIME, to); + 
assertNull(conversion.convert(null)); + assertEquals((short) 12345, conversion.convert(asTimeOnly(12345L))); + Exception e1 = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(-123456789L))); + assertEquals("[49343211] out of [short] range", e1.getMessage()); + Exception e2 = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); + assertEquals("[37056789] out of [short] range", e2.getMessage()); + } { Conversion conversion = conversionFor(DATETIME, to); assertNull(conversion.convert(null)); - assertEquals((short) 12345, conversion.convert(DateUtils.asDateTime(12345L))); - assertEquals((short) -12345, conversion.convert(DateUtils.asDateTime(-12345L))); + assertEquals((short) 12345, conversion.convert(asDateTime(12345L))); + assertEquals((short) -12345, conversion.convert(asDateTime(-12345L))); Exception e = expectThrows(SqlIllegalArgumentException.class, - () -> conversion.convert(DateUtils.asDateTime(Integer.MAX_VALUE))); + () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); assertEquals("[" + Integer.MAX_VALUE + "] out of [short] range", e.getMessage()); } } @@ -404,17 +573,26 @@ public void testConversionToByte() { { Conversion conversion = conversionFor(DATE, to); assertNull(conversion.convert(null)); - assertEquals((byte) 0, conversion.convert(DateUtils.asDateOnly(12345678L))); - Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(DateUtils.asDateOnly(123456789L))); + assertEquals((byte) 0, conversion.convert(asDateOnly(12345678L))); + Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asDateOnly(123456789L))); assertEquals("[86400000] out of [byte] range", e.getMessage()); } + { + Conversion conversion = conversionFor(TIME, to); + assertNull(conversion.convert(null)); + assertEquals((byte) 123, conversion.convert(asTimeOnly(123L))); + Exception e1 = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(-123L))); + assertEquals("[86399877] out of [byte] range", e1.getMessage()); + Exception e2 = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert(asTimeOnly(123456789L))); + assertEquals("[37056789] out of [byte] range", e2.getMessage()); + } { Conversion conversion = conversionFor(DATETIME, to); assertNull(conversion.convert(null)); - assertEquals((byte) 123, conversion.convert(DateUtils.asDateTime(123L))); - assertEquals((byte) -123, conversion.convert(DateUtils.asDateTime(-123L))); + assertEquals((byte) 123, conversion.convert(asDateTime(123L))); + assertEquals((byte) -123, conversion.convert(asDateTime(-123L))); Exception e = expectThrows(SqlIllegalArgumentException.class, - () -> conversion.convert(DateUtils.asDateTime(Integer.MAX_VALUE))); + () -> conversion.convert(asDateTime(Integer.MAX_VALUE))); assertEquals("[" + Integer.MAX_VALUE + "] out of [byte] range", e.getMessage()); } } @@ -453,10 +631,16 @@ public void testCommonType() { // dates/datetimes and intervals assertEquals(DATETIME, commonType(DATE, DATETIME)); assertEquals(DATETIME, commonType(DATETIME, DATE)); + assertEquals(DATETIME, commonType(TIME, DATETIME)); + assertEquals(DATETIME, commonType(DATETIME, TIME)); assertEquals(DATETIME, commonType(DATETIME, randomInterval())); assertEquals(DATETIME, commonType(randomInterval(), DATETIME)); + assertEquals(DATETIME, commonType(DATE, TIME)); + assertEquals(DATETIME, commonType(TIME, DATE)); assertEquals(DATE, commonType(DATE, randomInterval())); assertEquals(DATE, 
commonType(randomInterval(), DATE)); + assertEquals(TIME, commonType(TIME, randomInterval())); + assertEquals(TIME, commonType(randomInterval(), TIME)); assertEquals(INTERVAL_YEAR_TO_MONTH, commonType(INTERVAL_YEAR_TO_MONTH, INTERVAL_MONTH)); assertEquals(INTERVAL_HOUR_TO_SECOND, commonType(INTERVAL_HOUR_TO_MINUTE, INTERVAL_HOUR_TO_SECOND)); @@ -474,7 +658,7 @@ public void testEsDataTypes() { public void testConversionToUnsupported() { Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversionFor(INTEGER, UNSUPPORTED)); - assertEquals("cannot convert from [INTEGER] to [UNSUPPORTED]", e.getMessage()); + assertEquals("cannot convert from [integer] to [unsupported]", e.getMessage()); } public void testStringToIp() { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json index 6b4529650dd8a..b100c7e0a2471 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json @@ -11,6 +11,18 @@ "required": false, "description": "The id of the transform for which to get stats. '_all' or '*' implies all transforms" } + }, + "params": { + "from": { + "type": "number", + "required": false, + "description": "skips a number of transform stats, defaults to 0" + }, + "size": { + "type": "number", + "required": false, + "description": "specifies a max number of transform stats to get, defaults to 100" + } } }, "body": null diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml index ac6aca4f35d4e..88f1e43fd118b 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml @@ -83,29 +83,66 @@ teardown: } } - do: - data_frame.start_data_frame_transform: - transform_id: "airline-transform-stats-dos" + data_frame.put_data_frame_transform: + transform_id: "airline-transform-stats-the-third" + body: > + { + "source": { "index": "airline-data" }, + "dest": { "index": "airline-data-by-airline-stats-the-third" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + } + } - do: data_frame.get_data_frame_transform_stats: transform_id: "*" - - match: { count: 2 } + - match: { count: 3 } - match: { transforms.0.id: "airline-transform-stats" } - match: { transforms.1.id: "airline-transform-stats-dos" } + - match: { transforms.2.id: "airline-transform-stats-the-third" } - do: data_frame.get_data_frame_transform_stats: transform_id: "_all" - - match: { count: 2 } + - match: { count: 3 } - match: { transforms.0.id: "airline-transform-stats" } - match: { transforms.1.id: "airline-transform-stats-dos" } + - match: { transforms.2.id: "airline-transform-stats-the-third" } - do: - data_frame.stop_data_frame_transform: - transform_id: "airline-transform-stats-dos" + data_frame.get_data_frame_transform_stats: + transform_id: "airline-transform-stats-dos,airline-transform-stats-the*" + - match: { count: 2 } + - match: { transforms.0.id: "airline-transform-stats-dos" } + - match: { transforms.1.id: "airline-transform-stats-the-third" } + + - do: + 
data_frame.get_data_frame_transform_stats: + transform_id: "_all" + from: 0 + size: 1 + - match: { count: 1 } + - match: { transforms.0.id: "airline-transform-stats" } + + - do: + data_frame.get_data_frame_transform_stats: + transform_id: "_all" + from: 1 + size: 2 + - match: { count: 2 } + - match: { transforms.0.id: "airline-transform-stats-dos" } + - match: { transforms.1.id: "airline-transform-stats-the-third" } + - do: data_frame.delete_data_frame_transform: transform_id: "airline-transform-stats-dos" + - do: + data_frame.delete_data_frame_transform: + transform_id: "airline-transform-stats-the-third" + + --- "Test get multiple transform stats where one does not have a task": - do: diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index 4e62eedd221d2..adbf43140328b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -67,6 +67,7 @@ public void stopWebservice() throws Exception { webServer.close(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/35503") public void testHttps() throws Exception { webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webServer.getPort()) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 1e2c1ddbc64f1..aff3a62c12cf1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -38,6 +38,7 @@ public class WatchMetadataTests extends AbstractWatcherIntegrationTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40631") public void testWatchMetadata() throws Exception { Map metadata = new HashMap<>(); metadata.put("foo", "bar"); diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 2161ea1fd2aa0..917d73d5af2c4 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -13,6 +13,10 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase; import org.elasticsearch.xpack.core.ml.MlTasks; import 
diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java
index 2161ea1fd2aa0..917d73d5af2c4 100644
--- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java
+++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java
@@ -13,6 +13,10 @@
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
 import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase;
 import org.elasticsearch.xpack.core.ml.MlTasks;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@@ -112,6 +116,7 @@ private void oldClusterTests() throws IOException {
         DatafeedConfig.Builder dfBuilder = new DatafeedConfig.Builder(OLD_CLUSTER_STARTED_DATAFEED_ID, OLD_CLUSTER_OPEN_JOB_ID);
         dfBuilder.setIndices(Collections.singletonList("airline-data"));
+        addAggregations(dfBuilder);
 
         Request putDatafeed = new Request("PUT", "_xpack/ml/datafeeds/" + OLD_CLUSTER_STARTED_DATAFEED_ID);
         putDatafeed.setJsonEntity(Strings.toString(dfBuilder.build()));
@@ -245,4 +250,11 @@ private void assertJobNotPresent(String jobId, List<Map<String, Object>> jobs) {
             .filter(id -> id.equals(jobId)).findFirst();
         assertFalse(config.isPresent());
     }
+
+    private void addAggregations(DatafeedConfig.Builder dfBuilder) {
+        TermsAggregationBuilder airline = AggregationBuilders.terms("airline");
+        MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time").subAggregation(airline);
+        dfBuilder.setParsedAggregations(AggregatorFactories.builder().addAggregator(
+            AggregationBuilders.histogram("time").interval(300000).subAggregation(maxTime).field("time")));
+    }
 }
diff --git a/x-pack/qa/reindex-tests-with-security/build.gradle b/x-pack/qa/reindex-tests-with-security/build.gradle
index 3d415e0e2922a..64e1c61b60717 100644
--- a/x-pack/qa/reindex-tests-with-security/build.gradle
+++ b/x-pack/qa/reindex-tests-with-security/build.gradle
@@ -1,10 +1,4 @@
-import javax.net.ssl.HttpsURLConnection
-import javax.net.ssl.KeyManager
-import javax.net.ssl.SSLContext
-import javax.net.ssl.TrustManagerFactory
-import java.nio.charset.StandardCharsets
-import java.security.KeyStore
-import java.security.SecureRandom
+import org.elasticsearch.gradle.http.WaitForHttpResource
 
 apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
@@ -57,48 +51,11 @@ integTestCluster {
         'bin/elasticsearch-users', 'useradd', user, '-p', 'x-pack-test-password', '-r', role
     }
     waitCondition = { node, ant ->
-        // Load the CA PKCS#12 file as a truststore
-        KeyStore ks = KeyStore.getInstance("PKCS12");
-        ks.load(caFile.newInputStream(), 'password'.toCharArray());
-        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
-        tmf.init(ks);
-
-        // Configre a SSL context for TLS1.2 using our CA trust manager
-        SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
-        sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
-
-        // Check whether the cluster has started
-        URL url = new URL("https://${node.httpUri()}/_cluster/health?wait_for_nodes=${numNodes}&wait_for_status=yellow");
-        for (int i = 20; i >= 0; i--) {
-            // we use custom wait logic here for HTTPS
-            HttpsURLConnection httpURLConnection = null;
-            try {
-                logger.info("Trying ${url}");
-                httpURLConnection = (HttpsURLConnection) url.openConnection();
-                httpURLConnection.setSSLSocketFactory(sslContext.getSocketFactory());
-                httpURLConnection.setRequestProperty("Authorization",
-                    "Basic " + Base64.getEncoder().encodeToString("test_admin:x-pack-test-password".getBytes(StandardCharsets.UTF_8)));
-                httpURLConnection.setRequestMethod("GET");
-                httpURLConnection.connect();
-                if (httpURLConnection.getResponseCode() == 200) {
-                    logger.info("Cluster has started");
-                    return true;
-                } else {
-                    logger.debug("HTTP response was [{}]", httpURLConnection.getResponseCode());
-                }
-            } catch (IOException e) {
-                if (i == 0) {
-                    logger.error("Failed to call cluster health - " + e)
-                }
-                logger.debug("Call to [{}] threw an exception", url, e)
-            } finally {
-                if (httpURLConnection != null) {
-                    httpURLConnection.disconnect();
-                }
-            }
-            // did not start, so wait a bit before trying again
-            Thread.sleep(750L);
-        }
-        return false;
+        WaitForHttpResource http = new WaitForHttpResource("https", node.httpUri(), numNodes)
+        http.setTrustStoreFile(caFile)
+        http.setTrustStorePassword("password")
+        http.setUsername("test_admin")
+        http.setPassword("x-pack-test-password")
+        return http.wait(5000)
     }
 }
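The `waitCondition` above (and the near-identical one in smoke-test-plugins-ssl at the end of this patch) now delegates to the shared `WaitForHttpResource` helper in `buildSrc` instead of repeating the inline HTTPS polling. Below is a minimal sketch of that pattern, assuming only the method names visible in the build scripts (`setTrustStoreFile`, `setTrustStorePassword`, `setUsername`, `setPassword`, `wait(int)`); the class name, retry interval, and keystore-type detection are illustrative, not the actual buildSrc implementation.

----------------------------------------------------
// Hypothetical sketch of an HTTPS readiness poller in the spirit of the
// buildSrc helper; only the setter/wait names mirror the build-script usage.
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.KeyStore;
import java.security.SecureRandom;
import java.util.Base64;

public class WaitForHttpSketch {
    private final URL url;
    private File trustStoreFile;
    private String trustStorePassword;
    private String username;
    private String password;

    public WaitForHttpSketch(String protocol, String httpUri, int numNodes) throws Exception {
        // Same health check the inline Groovy used to build by hand.
        this.url = new URL(protocol + "://" + httpUri
            + "/_cluster/health?wait_for_nodes=" + numNodes + "&wait_for_status=yellow");
    }

    public void setTrustStoreFile(File file) { this.trustStoreFile = file; }
    public void setTrustStorePassword(String secret) { this.trustStorePassword = secret; }
    public void setUsername(String name) { this.username = name; }
    public void setPassword(String secret) { this.password = secret; }

    public boolean wait(int durationInMs) throws Exception {
        SSLContext ssl = buildSslContext();
        long deadline = System.currentTimeMillis() + durationInMs;
        while (System.currentTimeMillis() < deadline) {
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            try {
                if (connection instanceof HttpsURLConnection) {
                    ((HttpsURLConnection) connection).setSSLSocketFactory(ssl.getSocketFactory());
                }
                if (username != null) {
                    String token = Base64.getEncoder().encodeToString(
                        (username + ":" + password).getBytes(StandardCharsets.UTF_8));
                    connection.setRequestProperty("Authorization", "Basic " + token);
                }
                connection.setRequestMethod("GET");
                connection.connect();
                if (connection.getResponseCode() == 200) {
                    return true; // cluster is up and healthy
                }
            } catch (IOException e) {
                // node not listening yet - fall through and retry
            } finally {
                connection.disconnect();
            }
            Thread.sleep(500L); // back off briefly between attempts
        }
        return false;
    }

    private SSLContext buildSslContext() throws Exception {
        // Trust only the configured store: a PKCS#12 CA file in one project,
        // a JKS client keystore in the other.
        String type = trustStoreFile.getName().endsWith(".p12") ? "PKCS12" : "jks";
        KeyStore store = KeyStore.getInstance(type);
        try (InputStream in = new FileInputStream(trustStoreFile)) {
            store.load(in, trustStorePassword.toCharArray());
        }
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(store);
        SSLContext ssl = SSLContext.getInstance("TLSv1.2");
        ssl.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
        return ssl;
    }
}
----------------------------------------------------

Keeping the trust material and credentials as setters lets each cluster configuration supply its own truststore while sharing one retry loop.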
diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle
index f689573a61437..0cdbbe71e55ba 100644
--- a/x-pack/qa/rolling-upgrade/build.gradle
+++ b/x-pack/qa/rolling-upgrade/build.gradle
@@ -232,6 +232,8 @@ for (Version version : bwcVersions.wireCompatible) {
         'mixed_cluster/10_basic/Start scroll in mixed cluster on upgraded node that we will continue after upgrade',
         'mixed_cluster/30_ml_jobs_crud/Create a job in the mixed cluster and write some data',
         'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed in mixed cluster',
+        'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed without aggs in mixed cluster',
+        'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed with aggs in mixed cluster'
     ].join(',')
     finalizedBy "${baseName}#oldClusterTestCluster#node1.stop"
 }
diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml
index 6812245e5a24e..b37d6de4947c7 100644
--- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml
+++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml
@@ -1,28 +1,94 @@
 ---
-"Test old cluster datafeed":
+"Test old cluster datafeed without aggs":
   - do:
       ml.get_datafeeds:
-        datafeed_id: old-cluster-datafeed
-  - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"}
+        datafeed_id: old-cluster-datafeed-without-aggs
+  - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-without-aggs"}
   - length: { datafeeds.0.indices: 1 }
   - gte: { datafeeds.0.scroll_size: 2000 }
+  - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless }
+  - is_false: datafeeds.0.aggregations
 
   - do:
       ml.get_datafeed_stats:
-        datafeed_id: old-cluster-datafeed
+        datafeed_id: old-cluster-datafeed-without-aggs
   - match: { datafeeds.0.state: "stopped"}
   - is_false: datafeeds.0.node
 
 ---
-"Put job and datafeed in mixed cluster":
+"Test old cluster datafeed with aggs":
+  - do:
+      ml.get_datafeeds:
+        datafeed_id: old-cluster-datafeed-with-aggs
+  - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-with-aggs"}
+  - length: { datafeeds.0.indices: 1 }
+  - gte: { datafeeds.0.scroll_size: 2000 }
+  - is_false: datafeeds.0.script_fields
+  - match: { datafeeds.0.aggregations.buckets.date_histogram.field: time }
+  - match: { datafeeds.0.aggregations.buckets.aggregations.time.max.field: time }
+
+  - do:
+      ml.get_datafeed_stats:
+        datafeed_id: old-cluster-datafeed-with-aggs
+  - match: { datafeeds.0.state: "stopped"}
+  - is_false: datafeeds.0.node
+
+---
+"Put job and datafeed without aggs in mixed cluster":
+  - do:
+      ml.put_job:
+        job_id: mixed-cluster-datafeed-job-without-aggs
+        body: >
+          {
+            "description":"Cluster upgrade",
+            "analysis_config" : {
+              "bucket_span": "60s",
+              "detectors" :[{"function":"count"}]
+            },
+            "analysis_limits" : {
+              "model_memory_limit": "50mb"
+            },
+            "data_description" : {
+              "format":"xcontent",
+              "time_field":"time"
+            }
+          }
+
+  - do:
+      ml.put_datafeed:
+        datafeed_id: mixed-cluster-datafeed-without-aggs
+        body: >
+          {
+            "job_id":"mixed-cluster-datafeed-job-without-aggs",
+            "indices":["airline-data"],
+            "scroll_size": 2000,
+            "script_fields": {
+              "double_responsetime": {
+                "script": {
+                  "lang": "painless",
+                  "source": "doc['responsetime'].value * 2"
+                }
+              }
+            }
+          }
+
+  - do:
+      ml.get_datafeed_stats:
+        datafeed_id: mixed-cluster-datafeed-without-aggs
+  - match: { datafeeds.0.state: stopped}
+  - is_false: datafeeds.0.node
+
+---
+"Put job and datafeed with aggs in mixed cluster":
+
   - do:
       ml.put_job:
-        job_id: mixed-cluster-datafeed-job
+        job_id: mixed-cluster-datafeed-job-with-aggs
         body: >
           {
             "description":"Cluster upgrade",
             "analysis_config" : {
               "bucket_span": "60s",
+              "summary_count_field_name": "doc_count",
               "detectors" :[{"function":"count"}]
             },
             "analysis_limits" : {
@@ -36,16 +102,43 @@
 
   - do:
       ml.put_datafeed:
-        datafeed_id: mixed-cluster-datafeed
+        datafeed_id: mixed-cluster-datafeed-with-aggs
         body: >
           {
-            "job_id":"mixed-cluster-datafeed-job",
+            "job_id":"mixed-cluster-datafeed-job-with-aggs",
             "indices":["airline-data"],
-            "scroll_size": 2000
+            "scroll_size": 2000,
+            "aggregations": {
+              "buckets": {
+                "date_histogram": {
+                  "field": "time",
+                  "interval": "30s",
+                  "time_zone": "UTC"
+                },
+                "aggregations": {
+                  "time": {
+                    "max": {"field": "time"}
+                  },
+                  "airline": {
+                    "terms": {
+                      "field": "airline",
+                      "size": 100
+                    },
+                    "aggregations": {
+                      "responsetime": {
+                        "avg": {
+                          "field": "responsetime"
+                        }
+                      }
+                    }
+                  }
+                }
+              }
+            }
           }
 
   - do:
       ml.get_datafeed_stats:
-        datafeed_id: mixed-cluster-datafeed
+        datafeed_id: mixed-cluster-datafeed-with-aggs
   - match: { datafeeds.0.state: stopped}
   - is_false: datafeeds.0.node
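For comparison with the YAML fixtures above, the same aggs datafeed can be created from Java test code with the low-level REST client, as `MlMigrationFullClusterRestartIT` does for its datafeed. A minimal sketch, assuming a running cluster reachable through a `RestClient` named `client` and the pre-7.0 `_xpack/ml` endpoint used elsewhere in this change; the ID and body simply mirror the YAML:

----------------------------------------------------
// Hypothetical example: PUT a datafeed with aggregations via the low-level
// REST client. The JSON body mirrors the YAML fixture above; the class and
// method names are illustrative.
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class PutDatafeedExample {
    static void putDatafeedWithAggs(RestClient client) throws Exception {
        Request request = new Request("PUT", "_xpack/ml/datafeeds/mixed-cluster-datafeed-with-aggs");
        // A date_histogram bucketing agg with a max(time) sub-aggregation,
        // which is what the upgraded-cluster tests assert on.
        request.setJsonEntity("{"
            + "\"job_id\": \"mixed-cluster-datafeed-job-with-aggs\","
            + "\"indices\": [\"airline-data\"],"
            + "\"scroll_size\": 2000,"
            + "\"aggregations\": {"
            + "  \"buckets\": {"
            + "    \"date_histogram\": {\"field\": \"time\", \"interval\": \"30s\", \"time_zone\": \"UTC\"},"
            + "    \"aggregations\": {\"time\": {\"max\": {\"field\": \"time\"}}}"
            + "  }"
            + "}"
            + "}");
        Response response = client.performRequest(request);
        System.out.println(EntityUtils.toString(response.getEntity()));
    }
}
----------------------------------------------------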
"format":"xcontent", + "time_field":"time" + } + } + - match: { job_id: old-cluster-datafeed-job-with-aggs } + + - do: + ml.put_datafeed: + datafeed_id: old-cluster-datafeed-with-aggs + body: > + { + "job_id":"old-cluster-datafeed-job-with-aggs", + "indices":["airline-data"], + "scroll_size": 2000, + "aggregations": { + "buckets": { + "date_histogram": { + "field": "time", + "interval": "30s", + "time_zone": "UTC" + }, + "aggregations": { + "time": { + "max": {"field": "time"} + }, + "airline": { + "terms": { + "field": "airline", + "size": 100 + }, + "aggregations": { + "responsetime": { + "avg": { + "field": "responsetime" + } + } + } + } + } + } + } } - do: ml.get_datafeed_stats: - datafeed_id: old-cluster-datafeed + datafeed_id: old-cluster-datafeed-with-aggs - match: { datafeeds.0.state: stopped} - is_false: datafeeds.0.node diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml index 928fb3a066c28..cee6af0df76ad 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml @@ -5,7 +5,9 @@ setup: wait_for_nodes: 3 # wait for long enough that we give delayed unassigned shards to stop being delayed timeout: 70s - + +--- +"Test old and mixed cluster datafeeds without aggs": - do: indices.create: index: airline-data @@ -15,82 +17,187 @@ setup: time: type: date + - do: + ml.get_datafeeds: + datafeed_id: old-cluster-datafeed-without-aggs + - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-without-aggs"} + - length: { datafeeds.0.indices: 1 } + - gte: { datafeeds.0.scroll_size: 2000 } + - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless } + - is_false: datafeeds.0.aggregations + + - do: + ml.get_datafeed_stats: + datafeed_id: old-cluster-datafeed-without-aggs + - match: { datafeeds.0.state: "stopped"} + - is_false: datafeeds.0.node + + - do: + ml.get_datafeeds: + datafeed_id: mixed-cluster-datafeed-without-aggs + - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed-without-aggs"} + - length: { datafeeds.0.indices: 1 } + - gte: { datafeeds.0.scroll_size: 2000 } + - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless } + - is_false: datafeeds.0.aggregations + + - do: + ml.get_datafeed_stats: + datafeed_id: mixed-cluster-datafeed-without-aggs + - match: { datafeeds.0.state: "stopped"} + - is_false: datafeeds.0.node + + - do: + ml.open_job: + job_id: old-cluster-datafeed-job-without-aggs + + - do: + ml.start_datafeed: + datafeed_id: old-cluster-datafeed-without-aggs + start: 0 + + - do: + ml.stop_datafeed: + datafeed_id: old-cluster-datafeed-without-aggs + + - do: + ml.close_job: + job_id: old-cluster-datafeed-job-without-aggs + + - do: + ml.delete_datafeed: + datafeed_id: old-cluster-datafeed-without-aggs + + - do: + ml.delete_job: + job_id: old-cluster-datafeed-job-without-aggs + - match: { acknowledged: true } + + - do: + ml.open_job: + job_id: mixed-cluster-datafeed-job-without-aggs + + - do: + ml.start_datafeed: + datafeed_id: mixed-cluster-datafeed-without-aggs + start: 0 + + - do: + ml.stop_datafeed: + datafeed_id: mixed-cluster-datafeed-without-aggs + + - do: + ml.close_job: + job_id: mixed-cluster-datafeed-job-without-aggs + + - do: + ml.delete_datafeed: + datafeed_id: 
diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml
index 928fb3a066c28..cee6af0df76ad 100644
--- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml
+++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml
@@ -5,7 +5,9 @@ setup:
         wait_for_nodes: 3
         # wait for long enough that we give delayed unassigned shards to stop being delayed
         timeout: 70s
-
+
+---
+"Test old and mixed cluster datafeeds without aggs":
   - do:
       indices.create:
         index: airline-data
@@ -15,82 +17,187 @@ setup:
             time:
               type: date
 
+  - do:
+      ml.get_datafeeds:
+        datafeed_id: old-cluster-datafeed-without-aggs
+  - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-without-aggs"}
+  - length: { datafeeds.0.indices: 1 }
+  - gte: { datafeeds.0.scroll_size: 2000 }
+  - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless }
+  - is_false: datafeeds.0.aggregations
+
+  - do:
+      ml.get_datafeed_stats:
+        datafeed_id: old-cluster-datafeed-without-aggs
+  - match: { datafeeds.0.state: "stopped"}
+  - is_false: datafeeds.0.node
+
+  - do:
+      ml.get_datafeeds:
+        datafeed_id: mixed-cluster-datafeed-without-aggs
+  - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed-without-aggs"}
+  - length: { datafeeds.0.indices: 1 }
+  - gte: { datafeeds.0.scroll_size: 2000 }
+  - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless }
+  - is_false: datafeeds.0.aggregations
+
+  - do:
+      ml.get_datafeed_stats:
+        datafeed_id: mixed-cluster-datafeed-without-aggs
+  - match: { datafeeds.0.state: "stopped"}
+  - is_false: datafeeds.0.node
+
+  - do:
+      ml.open_job:
+        job_id: old-cluster-datafeed-job-without-aggs
+
+  - do:
+      ml.start_datafeed:
+        datafeed_id: old-cluster-datafeed-without-aggs
+        start: 0
+
+  - do:
+      ml.stop_datafeed:
+        datafeed_id: old-cluster-datafeed-without-aggs
+
+  - do:
+      ml.close_job:
+        job_id: old-cluster-datafeed-job-without-aggs
+
+  - do:
+      ml.delete_datafeed:
+        datafeed_id: old-cluster-datafeed-without-aggs
+
+  - do:
+      ml.delete_job:
+        job_id: old-cluster-datafeed-job-without-aggs
+  - match: { acknowledged: true }
+
+  - do:
+      ml.open_job:
+        job_id: mixed-cluster-datafeed-job-without-aggs
+
+  - do:
+      ml.start_datafeed:
+        datafeed_id: mixed-cluster-datafeed-without-aggs
+        start: 0
+
+  - do:
+      ml.stop_datafeed:
+        datafeed_id: mixed-cluster-datafeed-without-aggs
+
+  - do:
+      ml.close_job:
+        job_id: mixed-cluster-datafeed-job-without-aggs
+
+  - do:
+      ml.delete_datafeed:
+        datafeed_id: mixed-cluster-datafeed-without-aggs
+
+  - do:
+      ml.delete_job:
+        job_id: mixed-cluster-datafeed-job-without-aggs
+  - match: { acknowledged: true }
+
+  - do:
+      indices.delete:
+        index: airline-data
+
 ---
-"Test old and mixed cluster datafeeds":
+"Test old and mixed cluster datafeeds with aggs":
+  - do:
+      indices.create:
+        index: airline-data
+        body:
+          mappings:
+            properties:
+              time:
+                type: date
+
   - do:
       ml.get_datafeeds:
-        datafeed_id: old-cluster-datafeed
-  - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"}
+        datafeed_id: old-cluster-datafeed-with-aggs
+  - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-with-aggs"}
   - length: { datafeeds.0.indices: 1 }
   - gte: { datafeeds.0.scroll_size: 2000 }
+  - is_false: datafeeds.0.script_fields
+  - match: { datafeeds.0.aggregations.buckets.date_histogram.field: time }
+  - match: { datafeeds.0.aggregations.buckets.aggregations.time.max.field: time }
 
   - do:
       ml.get_datafeed_stats:
-        datafeed_id: old-cluster-datafeed
+        datafeed_id: old-cluster-datafeed-with-aggs
   - match: { datafeeds.0.state: "stopped"}
   - is_false: datafeeds.0.node
 
   - do:
       ml.get_datafeeds:
-        datafeed_id: mixed-cluster-datafeed
-  - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed"}
+        datafeed_id: mixed-cluster-datafeed-with-aggs
+  - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed-with-aggs"}
   - length: { datafeeds.0.indices: 1 }
   - gte: { datafeeds.0.scroll_size: 2000 }
+  - is_false: datafeeds.0.script_fields
+  - match: { datafeeds.0.aggregations.buckets.date_histogram.field: time }
+  - match: { datafeeds.0.aggregations.buckets.aggregations.time.max.field: time }
 
   - do:
       ml.get_datafeed_stats:
-        datafeed_id: mixed-cluster-datafeed
+        datafeed_id: mixed-cluster-datafeed-with-aggs
   - match: { datafeeds.0.state: "stopped"}
   - is_false: datafeeds.0.node
 
   - do:
       ml.open_job:
-        job_id: old-cluster-datafeed-job
+        job_id: old-cluster-datafeed-job-with-aggs
 
   - do:
       ml.start_datafeed:
-        datafeed_id: old-cluster-datafeed
+        datafeed_id: old-cluster-datafeed-with-aggs
         start: 0
 
   - do:
       ml.stop_datafeed:
-        datafeed_id: old-cluster-datafeed
+        datafeed_id: old-cluster-datafeed-with-aggs
 
   - do:
       ml.close_job:
-        job_id: old-cluster-datafeed-job
+        job_id: old-cluster-datafeed-job-with-aggs
 
   - do:
       ml.delete_datafeed:
-        datafeed_id: old-cluster-datafeed
+        datafeed_id: old-cluster-datafeed-with-aggs
 
   - do:
       ml.delete_job:
-        job_id: old-cluster-datafeed-job
+        job_id: old-cluster-datafeed-job-with-aggs
   - match: { acknowledged: true }
 
   - do:
       ml.open_job:
-        job_id: mixed-cluster-datafeed-job
+        job_id: mixed-cluster-datafeed-job-with-aggs
 
   - do:
       ml.start_datafeed:
-        datafeed_id: mixed-cluster-datafeed
+        datafeed_id: mixed-cluster-datafeed-with-aggs
         start: 0
 
   - do:
       ml.stop_datafeed:
-        datafeed_id: mixed-cluster-datafeed
+        datafeed_id: mixed-cluster-datafeed-with-aggs
 
   - do:
       ml.close_job:
-        job_id: mixed-cluster-datafeed-job
+        job_id: mixed-cluster-datafeed-job-with-aggs
 
   - do:
       ml.delete_datafeed:
-        datafeed_id: mixed-cluster-datafeed
+        datafeed_id: mixed-cluster-datafeed-with-aggs
 
   - do:
       ml.delete_job:
-        job_id: mixed-cluster-datafeed-job
+        job_id: mixed-cluster-datafeed-job-with-aggs
   - match: { acknowledged: true }
+
+  - do:
+      indices.delete:
+        index: airline-data
diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle
index 5721815f07856..e88eac3028f3d 100644
--- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle
+++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle
@@ -1,13 +1,6 @@
 import org.elasticsearch.gradle.MavenFilteringHack
 import org.elasticsearch.gradle.test.NodeInfo
-
-import javax.net.ssl.HttpsURLConnection
-import javax.net.ssl.KeyManager
-import javax.net.ssl.SSLContext
-import javax.net.ssl.TrustManagerFactory
-import java.nio.charset.StandardCharsets
-import java.security.KeyStore
-import java.security.SecureRandom
+import org.elasticsearch.gradle.http.WaitForHttpResource
 
 apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
@@ -86,45 +79,12 @@ integTestCluster {
         'bin/elasticsearch-users', 'useradd', 'monitoring_agent', '-p', 'x-pack-test-password', '-r', 'remote_monitoring_agent'
 
     waitCondition = { NodeInfo node, AntBuilder ant ->
-        File tmpFile = new File(node.cwd, 'wait.success')
-        KeyStore keyStore = KeyStore.getInstance("JKS");
-        keyStore.load(clientKeyStore.newInputStream(), 'testclient'.toCharArray());
-        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
-        tmf.init(keyStore);
-        // We don't need a KeyManager as there won't be client auth required so pass an empty array
-        SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
-        sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
-        for (int i = 0; i < 10; i++) {
-            // we use custom wait logic here for HTTPS
-            HttpsURLConnection httpURLConnection = null;
-            try {
-                httpURLConnection = (HttpsURLConnection) new URL("https://${node.httpUri()}/_cluster/health?wait_for_nodes=${numNodes}&wait_for_status=yellow").openConnection();
-                httpURLConnection.setSSLSocketFactory(sslContext.getSocketFactory());
-                httpURLConnection.setRequestProperty("Authorization", "Basic " +
-                    Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)));
-                httpURLConnection.setRequestMethod("GET");
-                httpURLConnection.connect();
-                if (httpURLConnection.getResponseCode() == 200) {
-                    tmpFile.withWriter StandardCharsets.UTF_8.name(), {
-                        it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name()))
-                    }
-                }
-            } catch (IOException e) {
-                if (i == 9) {
-                    logger.error("final attempt of calling cluster health failed", e)
-                } else {
-                    logger.debug("failed to call cluster health", e)
-                }
-            } finally {
-                if (httpURLConnection != null) {
-                    httpURLConnection.disconnect();
-                }
-            }
-
-            // did not start, so wait a bit before trying again
-            Thread.sleep(500L);
-        }
-        return tmpFile.exists()
+        WaitForHttpResource http = new WaitForHttpResource("https", node.httpUri(), numNodes)
+        http.setTrustStoreFile(clientKeyStore)
+        http.setTrustStorePassword("testclient")
+        http.setUsername("test_user")
+        http.setPassword("x-pack-test-password")
+        return http.wait(5000)
     }
 }