From 65e031742d29e5b2050d676c8d0c1015866318bc Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 28 Mar 2019 17:23:40 +0200 Subject: [PATCH 01/63] Test fixtures krb5 (#40297) Replaces the vagrant based kerberos fixtures with docker based test fixtures plugin. The configuration is now entirely static on the docker side and no longer driven by Gradle, also two different services are being configured since there are two different consumers of the fixture that can run in parallel and require different configurations. --- .../testfixtures/TestFixturesPlugin.java | 4 +- plugins/repository-hdfs/build.gradle | 121 +++++------------- test/fixtures/hdfs-fixture/Dockerfile | 8 ++ test/fixtures/hdfs-fixture/build.gradle | 28 ++-- test/fixtures/hdfs-fixture/docker-compose.yml | 11 ++ .../src/main/java/hdfs/MiniHDFS.java | 1 - test/fixtures/krb5kdc-fixture/Dockerfile | 9 ++ test/fixtures/krb5kdc-fixture/Vagrantfile | 53 -------- test/fixtures/krb5kdc-fixture/build.gradle | 78 ++++------- .../krb5kdc-fixture/docker-compose.yml | 24 ++++ .../src/main/resources/provision/addprinc.sh | 11 +- .../src/main/resources/provision/hdfs.sh | 11 ++ .../main/resources/provision/installkdc.sh | 30 +---- .../resources/provision/krb5.conf.template | 6 +- .../src/main/resources/provision/peppa.sh | 13 ++ x-pack/qa/kerberos-tests/build.gradle | 105 +++------------ 16 files changed, 184 insertions(+), 329 deletions(-) create mode 100644 test/fixtures/hdfs-fixture/Dockerfile create mode 100644 test/fixtures/krb5kdc-fixture/Dockerfile delete mode 100644 test/fixtures/krb5kdc-fixture/Vagrantfile create mode 100644 test/fixtures/krb5kdc-fixture/docker-compose.yml create mode 100644 test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh create mode 100644 test/fixtures/krb5kdc-fixture/src/main/resources/provision/peppa.sh diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 59cb851974cb5..57f77d6d1a256 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -31,7 +31,6 @@ import org.gradle.api.Task; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.ExtraPropertiesExtension; -import org.gradle.api.tasks.Input; import org.gradle.api.tasks.TaskContainer; import java.lang.reflect.InvocationTargetException; @@ -104,6 +103,7 @@ public void apply(Project project) { "but none could be found so these will be skipped", project.getPath() ); disableTaskByType(tasks, getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask")); + disableTaskByType(tasks, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask")); // conventions are not honored when the tasks are disabled disableTaskByType(tasks, TestingConventionsTasks.class); disableTaskByType(tasks, ComposeUp.class); @@ -122,6 +122,7 @@ public void apply(Project project) { fixtureProject, (name, port) -> setSystemProperty(task, name, port) ); + task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture")); }) ); @@ -155,7 +156,6 @@ private void configureServiceInfoForTask(Task task, Project fixtureProject, BiCo ); } - @Input public boolean dockerComposeSupported(Project project) { if (OS.current().equals(OS.WINDOWS)) { return false; diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 34323fb930fce..946b377491d26 
100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -24,18 +24,19 @@ import org.elasticsearch.gradle.test.RestIntegTestTask import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths - +apply plugin: 'elasticsearch.test.fixtures' + esplugin { description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' } -apply plugin: 'elasticsearch.vagrantsupport' - versions << [ 'hadoop2': '2.8.1' ] +testFixtures.useFixture ":test:fixtures:krb5kdc-fixture" + configurations { hdfsFixture } @@ -68,67 +69,27 @@ dependencyLicenses { mapping from: /hadoop-.*/, to: 'hadoop' } -// MIT Kerberos Vagrant Testing Fixture -String box = "krb5kdc" -Map vagrantEnvVars = [ - 'VAGRANT_CWD' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}", - 'VAGRANT_VAGRANTFILE' : 'Vagrantfile', - 'VAGRANT_PROJECT_DIR' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}" -] - -task krb5kdcUpdate(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'box' - subcommand 'update' - boxName box - environmentVars vagrantEnvVars - dependsOn "vagrantCheckVersion", "virtualboxCheckVersion" -} - -task krb5kdcFixture(type: org.elasticsearch.gradle.test.VagrantFixture) { - command 'up' - args '--provision', '--provider', 'virtualbox' - boxName box - environmentVars vagrantEnvVars - dependsOn krb5kdcUpdate -} - -task krb5AddPrincipals { - dependsOn krb5kdcFixture -} -List principals = [ "elasticsearch", "hdfs/hdfs.build.elastic.co" ] String realm = "BUILD.ELASTIC.CO" -for (String principal : principals) { - Task create = project.tasks.create("addPrincipal#${principal}".replace('/', '_'), org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'ssh' - args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $principal" - boxName box - environmentVars vagrantEnvVars - dependsOn krb5kdcFixture - } - krb5AddPrincipals.dependsOn(create) -} // Create HDFS File System Testing Fixtures for HA/Secure combinations for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) { project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) { - dependsOn project.configurations.hdfsFixture + dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture executable = new File(project.runtimeJavaHome, 'bin/java') env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }" waitCondition = { fixture, ant -> // the hdfs.MiniHDFS fixture writes the ports file when // it's ready, so we can just wait for the file to exist return fixture.portsFile.exists() - } + } final List miniHDFSArgs = [] // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) { - dependsOn krb5kdcFixture, krb5AddPrincipals - Path krb5Config = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf") - miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5Config}"); + miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}"); if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) { miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED') } @@ -145,9 +106,11 @@ for (String fixtureName : 
['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', // If it's a secure fixture, then set the principal name and keytab locations to use for auth. if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) { - Path keytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab") miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add("${keytabPath}") + miniHDFSArgs.add( + project(':test:fixtures:krb5kdc-fixture') + .ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") + ) } args miniHDFSArgs.toArray() @@ -170,10 +133,11 @@ project.afterEvaluate { // If it's a secure cluster, add the keytab as an extra config, and set the krb5 conf in the JVM options. if (integTestTaskName.equals('integTestSecure') || integTestTaskName.equals('integTestSecureHa')) { - Path elasticsearchKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("elasticsearch.keytab").toAbsolutePath() - Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath() - - restIntegTestTask.clusterConfig.extraConfigFile("repository-hdfs/krb5.keytab", "${elasticsearchKT}") + String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs") + restIntegTestTask.clusterConfig.extraConfigFile( + "repository-hdfs/krb5.keytab", + "${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}" + ) jvmArgs = jvmArgs + " " + "-Djava.security.krb5.conf=${krb5conf}" if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) { jvmArgs = jvmArgs + " " + '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED' @@ -189,9 +153,10 @@ project.afterEvaluate { if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) { restIntegTestTaskRunner.jvmArg '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED' } - - Path hdfsKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab").toAbsolutePath() - restIntegTestTaskRunner.systemProperty "test.krb5.keytab.hdfs", "${hdfsKT}" + restIntegTestTaskRunner.systemProperty ( + "test.krb5.keytab.hdfs", + project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab") + ) } } @@ -269,41 +234,25 @@ if (fixtureSupported) { integTestHa.setEnabled(false) } -// Secure HDFS testing relies on the Vagrant based Kerberos fixture. -boolean secureFixtureSupported = false -if (fixtureSupported) { - secureFixtureSupported = project.rootProject.vagrantSupported -} - -if (secureFixtureSupported) { - project.check.dependsOn(integTestSecure) - project.check.dependsOn(integTestSecureHa) +check.dependsOn(integTestSecure, integTestSecureHa) - // Fixture dependencies - integTestSecureCluster.dependsOn secureHdfsFixture, krb5kdcFixture - integTestSecureHaCluster.dependsOn secureHaHdfsFixture, krb5kdcFixture +// Fixture dependencies +integTestSecureCluster.dependsOn secureHdfsFixture +integTestSecureHaCluster.dependsOn secureHaHdfsFixture - // Set the keytab files in the classpath so that we can access them from test code without the security manager - // freaking out. - Path hdfsKeytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs") - project.dependencies { - testRuntime fileTree(dir: hdfsKeytabPath.toString(), include: ['*.keytab']) - } - - // Run just the secure hdfs rest test suite. 
- integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository' - // Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner. - integTestSecureRunner.exclude('**/Ha*TestSuiteIT.class') - - // Only include the HA integration tests for the HA test task - integTestSecureHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class']) -} else { - // Security tests unsupported. Don't run these tests. - integTestSecure.enabled = false - integTestSecureHa.enabled = false - testingConventions.enabled = false +// Set the keytab files in the classpath so that we can access them from test code without the security manager +// freaking out. +project.dependencies { + testRuntime fileTree(dir: project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent, include: ['*.keytab']) } +// Run just the secure hdfs rest test suite. +integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository' +// Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner. +integTestSecureRunner.exclude('**/Ha*TestSuiteIT.class') +// Only include the HA integration tests for the HA test task +integTestSecureHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class']) + thirdPartyAudit { ignoreMissingClasses() ignoreViolations ( diff --git a/test/fixtures/hdfs-fixture/Dockerfile b/test/fixtures/hdfs-fixture/Dockerfile new file mode 100644 index 0000000000000..b9d0e60b7d6d4 --- /dev/null +++ b/test/fixtures/hdfs-fixture/Dockerfile @@ -0,0 +1,8 @@ +FROM java:8-jre + +RUN apt-get update && apt-get install net-tools + +EXPOSE 9998 +EXPOSE 9999 + +CMD java -cp "/fixture:/fixture/*" hdfs.MiniHDFS /data \ No newline at end of file diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 3f08ca7970ca7..f2aebda46b875 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -18,25 +18,23 @@ */ apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.test.fixtures' -versions << [ - 'hadoop2': '2.8.1' -] - -// we create MiniHdfsCluster with the hadoop artifact dependencies { - compile "org.apache.hadoop:hadoop-minicluster:${versions.hadoop2}" + compile "org.apache.hadoop:hadoop-minicluster:2.8.1" +} + +task syncClasses(type: Sync) { + from sourceSets.test.runtimeClasspath + into "${buildDir}/fixture" } -// for testing, until fixtures are actually debuggable. -// gradle hides *EVERYTHING* so you have no clue what went wrong. -task hdfs(type: JavaExec) { - classpath = sourceSets.test.compileClasspath + sourceSets.test.output - main = "hdfs.MiniHDFS" - args = [ 'build/fixtures/hdfsFixture' ] +preProcessFixture { + dependsOn syncClasses + + doLast { + file("${buildDir}/shared").mkdirs() + } } -// just a test fixture: we aren't using jars in releases -thirdPartyAudit.enabled = false -// TODO: add a simple HDFS client test for this fixture unitTest.enabled = false diff --git a/test/fixtures/hdfs-fixture/docker-compose.yml b/test/fixtures/hdfs-fixture/docker-compose.yml index e69de29bb2d1d..5bdc40b1f7246 100644 --- a/test/fixtures/hdfs-fixture/docker-compose.yml +++ b/test/fixtures/hdfs-fixture/docker-compose.yml @@ -0,0 +1,11 @@ +version: '3' +services: + hdfs: + hostname: hdfs.build.elastic.co + build: + context: . 
+ dockerfile: Dockerfile + volumes: + - ./build/fixture:/fixture + ports: + - "9999:9999" diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java index ce7401fe25cae..01315cdab01ca 100644 --- a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java +++ b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java @@ -98,7 +98,6 @@ public static void main(String[] args) throws Exception { UserGroupInformation.setConfiguration(cfg); - // TODO: remove hardcoded port! MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); if (secure) { builder.nameNodePort(9998); diff --git a/test/fixtures/krb5kdc-fixture/Dockerfile b/test/fixtures/krb5kdc-fixture/Dockerfile new file mode 100644 index 0000000000000..50de6334b9c78 --- /dev/null +++ b/test/fixtures/krb5kdc-fixture/Dockerfile @@ -0,0 +1,9 @@ +FROM ubuntu:14.04 +ADD . /fixture +RUN echo kerberos.build.elastic.co > /etc/hostname && echo "127.0.0.1 kerberos.build.elastic.co" >> /etc/hosts +RUN bash /fixture/src/main/resources/provision/installkdc.sh + +EXPOSE 88 +EXPOSE 88/udp + +CMD sleep infinity \ No newline at end of file diff --git a/test/fixtures/krb5kdc-fixture/Vagrantfile b/test/fixtures/krb5kdc-fixture/Vagrantfile deleted file mode 100644 index 72be4dad9cbe5..0000000000000 --- a/test/fixtures/krb5kdc-fixture/Vagrantfile +++ /dev/null @@ -1,53 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : - -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# This Vagrantfile exists to define a virtual machine running MIT's Kerberos 5 -# for usage as a testing fixture for the build process. -# -# In order to connect to the KDC process on this virtual machine, find and use -# the rendered krb5.conf file in the build output directory (build/conf). -# -# In order to provision principals in the KDC, use the provided addprinc.sh -# script with vagrant's ssh facility: -# -# vagrant ssh -c /vagrant/src/main/resources/provision/addprinc.sh principal -# -# You will find the newly created principal's keytab file in the build output -# directory (build/keytabs). Principal creation is idempotent, and will recopy -# existing user keytabs from the KDC if they already exist. 
- -Vagrant.configure("2") do |config| - - config.vm.define "krb5kdc" do |config| - config.vm.box = "elastic/ubuntu-14.04-x86_64" - end - - config.vm.hostname = "kerberos.build.elastic.co" - - if Vagrant.has_plugin?("vagrant-cachier") - config.cache.scope = :box - end - - config.vm.network "forwarded_port", guest: 88, host: 60088, protocol: "tcp" - config.vm.network "forwarded_port", guest: 88, host: 60088, protocol: "udp" - - config.vm.provision "shell", path: "src/main/resources/provision/installkdc.sh" - -end diff --git a/test/fixtures/krb5kdc-fixture/build.gradle b/test/fixtures/krb5kdc-fixture/build.gradle index 685483d534771..a3ca8d41bc4d9 100644 --- a/test/fixtures/krb5kdc-fixture/build.gradle +++ b/test/fixtures/krb5kdc-fixture/build.gradle @@ -16,68 +16,38 @@ * specific language governing permissions and limitations * under the License. */ +apply plugin: 'elasticsearch.test.fixtures' -apply plugin: 'elasticsearch.build' - -Map vagrantEnvVars = [ - 'VAGRANT_CWD' : "${project.projectDir.absolutePath}", - 'VAGRANT_VAGRANTFILE' : 'Vagrantfile', - 'VAGRANT_PROJECT_DIR' : "${project.projectDir.absolutePath}" -] - -String box = "krb5kdc" - -List defaultPrincipals = [ "elasticsearch" ] - -task update(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'box' - subcommand 'update' - boxName box - environmentVars vagrantEnvVars +// installKDC uses tabs in it for the Kerberos ACL file. +// Ignore it for pattern checking. +forbiddenPatterns { + exclude "**/installkdc.sh" } -task up(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'up' - args '--provision', '--provider', 'virtualbox' - boxName box - environmentVars vagrantEnvVars - dependsOn update -} +List services = ["peppa", "hdfs"] -task addDefaultPrincipals { - dependsOn up +preProcessFixture.doLast { + // We need to create these up-front because if docker creates them they will be owned by root and we won't be + // able to clean them up + services.each { file("${buildDir}/shared/${it}").mkdirs() } } -for (String principal : defaultPrincipals) { - Task addTask = project.tasks.create("addPrincipal#${principal}", org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'ssh' - args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $principal" - boxName box - environmentVars vagrantEnvVars - dependsOn up +postProcessFixture { + inputs.dir("${buildDir}/shared") + services.each { service -> + File confTemplate = file("${buildDir}/shared/${service}/krb5.conf.template") + File confFile = file("${buildDir}/shared/${service}/krb5.conf") + outputs.file(confFile) + doLast { + assert confTemplate.exists() + String confContents = confTemplate.text + .replace("\${MAPPED_PORT}", "${ext."test.fixtures.${service}.udp.88"}") + confFile.text = confContents + } } - addDefaultPrincipals.dependsOn(addTask) } -task halt(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'halt' - boxName box - environmentVars vagrantEnvVars -} - -task destroy(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'destroy' - args '-f' - boxName box - environmentVars vagrantEnvVars - dependsOn halt -} +project.ext.krb5Conf = { service -> file("$buildDir/shared/${service}/krb5.conf") } +project.ext.krb5Keytabs = { service, fileName -> file("$buildDir/shared/${service}/keytabs/${fileName}") } -thirdPartyAudit.enabled = false unitTest.enabled = false - -// installKDC uses tabs in it for the Kerberos ACL file. -// Ignore it for pattern checking. 
-forbiddenPatterns { - exclude "**/installkdc.sh" -} diff --git a/test/fixtures/krb5kdc-fixture/docker-compose.yml b/test/fixtures/krb5kdc-fixture/docker-compose.yml new file mode 100644 index 0000000000000..4d018dd6c3e08 --- /dev/null +++ b/test/fixtures/krb5kdc-fixture/docker-compose.yml @@ -0,0 +1,24 @@ +version: '3' +services: + peppa: + hostname: kerberos.build.elastic.co + build: + context: . + dockerfile: Dockerfile + command: "bash /fixture/src/main/resources/provision/peppa.sh" + volumes: + - ./build/shared/peppa:/fixture/build + ports: + - "4444" + - "88/udp" + hdfs: + hostname: kerberos.build.elastic.co + build: + context: . + dockerfile: Dockerfile + command: "bash /fixture/src/main/resources/provision/hdfs.sh" + volumes: + - ./build/shared/hdfs:/fixture/build + ports: + - "4444" + - "88/udp" diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh index d0d1570ae299a..9fc2a0735d666 100755 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh @@ -19,6 +19,9 @@ set -e +krb5kdc +kadmind + if [[ $# -lt 1 ]]; then echo 'Usage: addprinc.sh principalName [password]' echo ' principalName user principal name without realm' @@ -30,7 +33,7 @@ PRINC="$1" PASSWD="$2" USER=$(echo $PRINC | tr "/" "_") -VDIR=/vagrant +VDIR=/fixture RESOURCES=$VDIR/src/main/resources PROV_DIR=$RESOURCES/provision ENVPROP_FILE=$RESOURCES/env.properties @@ -64,3 +67,9 @@ else sudo kadmin -p $ADMIN_PRIN -kt $ADMIN_KTAB -q "addprinc -pw $PASSWD $PRINC" fi fi + +echo "Copying conf to local" +# make the configuration available externally +cp -v $LOCALSTATEDIR/krb5.conf $BUILD_DIR/krb5.conf.template +# We are running as root in the container, allow non root users running the container to be able to clean these up +chmod -R 777 $BUILD_DIR \ No newline at end of file diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh new file mode 100644 index 0000000000000..ef5bba076444c --- /dev/null +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -e + +addprinc.sh "elasticsearch" +addprinc.sh "hdfs/hdfs.build.elastic.co" + +# Use this as a signal that setup is complete +python3 -m http.server 4444 & + +sleep infinity \ No newline at end of file diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/installkdc.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/installkdc.sh index 2dc8ed92c9462..51af7984ce476 100755 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/installkdc.sh +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/installkdc.sh @@ -22,32 +22,15 @@ set -e # KDC installation steps and considerations based on https://web.mit.edu/kerberos/krb5-latest/doc/admin/install_kdc.html # and helpful input from https://help.ubuntu.com/community/Kerberos -VDIR=/vagrant +VDIR=/fixture RESOURCES=$VDIR/src/main/resources PROV_DIR=$RESOURCES/provision ENVPROP_FILE=$RESOURCES/env.properties -BUILD_DIR=$VDIR/build -CONF_DIR=$BUILD_DIR/conf -KEYTAB_DIR=$BUILD_DIR/keytabs LOCALSTATEDIR=/etc LOGDIR=/var/log/krb5 MARKER_FILE=/etc/marker -# Output location for our rendered configuration files and keytabs -mkdir -p $BUILD_DIR -rm -rf $BUILD_DIR/* -mkdir -p $CONF_DIR -mkdir -p $KEYTAB_DIR - -if [ -f $MARKER_FILE ]; then - echo "Already 
provisioned..." - echo "Recopying configuration files..." - cp $LOCALSTATEDIR/krb5.conf $CONF_DIR/krb5.conf - cp $LOCALSTATEDIR/krb5kdc/kdc.conf $CONF_DIR/kdc.conf - exit 0; -fi - # Pull environment information REALM_NAME=$(cat $ENVPROP_FILE | grep realm= | cut -d '=' -f 2) KDC_NAME=$(cat $ENVPROP_FILE | grep kdc= | cut -d '=' -f 2) @@ -60,7 +43,7 @@ sed -i 's/${REALM_NAME}/'$REALM_NAME'/g' $LOCALSTATEDIR/krb5.conf sed -i 's/${KDC_NAME}/'$KDC_NAME'/g' $LOCALSTATEDIR/krb5.conf sed -i 's/${BUILD_ZONE}/'$BUILD_ZONE'/g' $LOCALSTATEDIR/krb5.conf sed -i 's/${ELASTIC_ZONE}/'$ELASTIC_ZONE'/g' $LOCALSTATEDIR/krb5.conf -cp $LOCALSTATEDIR/krb5.conf $CONF_DIR/krb5.conf + # Transfer and interpolate the kdc.conf mkdir -p $LOCALSTATEDIR/krb5kdc @@ -69,7 +52,6 @@ sed -i 's/${REALM_NAME}/'$REALM_NAME'/g' $LOCALSTATEDIR/krb5kdc/kdc.conf sed -i 's/${KDC_NAME}/'$KDC_NAME'/g' $LOCALSTATEDIR/krb5kdc/kdc.conf sed -i 's/${BUILD_ZONE}/'$BUILD_ZONE'/g' $LOCALSTATEDIR/krb5kdc/kdc.conf sed -i 's/${ELASTIC_ZONE}/'$ELASTIC_ZONE'/g' $LOCALSTATEDIR/krb5.conf -cp $LOCALSTATEDIR/krb5kdc/kdc.conf $CONF_DIR/kdc.conf # Touch logging locations mkdir -p $LOGDIR @@ -112,9 +94,5 @@ EOF kadmin.local -q "addprinc -pw elastic admin/admin@$REALM_NAME" kadmin.local -q "ktadd -k /etc/admin.keytab admin/admin@$REALM_NAME" -# Start Kerberos Services -krb5kdc -kadmind - -# Mark that the vm is already provisioned -touch $MARKER_FILE \ No newline at end of file +# Create a link so addprinc.sh is on path +ln -s $PROV_DIR/addprinc.sh /usr/bin/ \ No newline at end of file diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template index e572c12e70957..9504b49bc7301 100644 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template @@ -32,12 +32,8 @@ [realms] ${REALM_NAME} = { - kdc = ${KDC_NAME}:88 - kdc = ${KDC_NAME}:60088 - kdc = localhost:60088 - kdc = localhost:88 - kdc = 127.0.0.1:60088 kdc = 127.0.0.1:88 + kdc = 127.0.0.1:${MAPPED_PORT} admin_server = ${KDC_NAME}:749 default_domain = ${BUILD_ZONE} } diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/peppa.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/peppa.sh new file mode 100644 index 0000000000000..815a9e94e8cb5 --- /dev/null +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/peppa.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -e + +addprinc.sh elasticsearch +addprinc.sh HTTP/localhost +addprinc.sh peppa +addprinc.sh george dino + +# Use this as a signal that setup is complete +python3 -m http.server 4444 & + +sleep infinity \ No newline at end of file diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index a59becbfe6b54..50b709f77dca5 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -2,9 +2,11 @@ import java.nio.file.Path import java.nio.file.Paths import java.nio.file.Files -apply plugin: 'elasticsearch.vagrantsupport' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' +apply plugin: 'elasticsearch.test.fixtures' + +testFixtures.useFixture ":test:fixtures:krb5kdc-fixture" dependencies { testCompile "org.elasticsearch.plugin:x-pack-core:${version}" @@ -12,75 +14,6 @@ dependencies { testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } -// MIT Kerberos Vagrant 
Testing Fixture -String box = "krb5kdc" -Map vagrantEnvVars = [ - 'VAGRANT_CWD' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}", - 'VAGRANT_VAGRANTFILE' : 'Vagrantfile', - 'VAGRANT_PROJECT_DIR' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}" -] - -task krb5kdcUpdate(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'box' - subcommand 'update' - boxName box - environmentVars vagrantEnvVars - dependsOn "vagrantCheckVersion", "virtualboxCheckVersion" -} - -task krb5kdcFixture(type: org.elasticsearch.gradle.test.VagrantFixture) { - command 'up' - args '--provision', '--provider', 'virtualbox' - boxName box - environmentVars vagrantEnvVars - dependsOn krb5kdcUpdate -} - -// lazily resolve to avoid any slowdowns from DNS lookups prior to when we need this value -Object httpPrincipal = new Object() { - @Override - String toString() { - InetAddress resolvedAddress = InetAddress.getByName('127.0.0.1') - return "HTTP/" + resolvedAddress.getCanonicalHostName() - } -} - -String realm = "BUILD.ELASTIC.CO" - -task 'addPrincipal#peppa'(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'ssh' - args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh peppa " - boxName box - environmentVars vagrantEnvVars - dependsOn krb5kdcFixture -} - -task 'addPrincipal#george'(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'ssh' - args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh george dino" - boxName box - environmentVars vagrantEnvVars - dependsOn krb5kdcFixture -} - -task 'addPrincipal#HTTP'(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) { - command 'ssh' - args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $httpPrincipal" - boxName box - environmentVars vagrantEnvVars - dependsOn krb5kdcFixture -} - -task krb5AddPrincipals { dependsOn krb5kdcFixture, 'addPrincipal#peppa', 'addPrincipal#george', 'addPrincipal#HTTP' } - -def generatedResources = "$buildDir/generated-resources/keytabs" -task copyKeytabToGeneratedResources(type: Copy) { - Path peppaKeytab = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("peppa.keytab").toAbsolutePath() - from peppaKeytab; - into generatedResources - dependsOn krb5AddPrincipals -} - integTestCluster { // force localhost IPv4 otherwise it is a chicken and egg problem where we need the keytab for the hostname when starting the cluster // but do not know the exact address that is first in the http ports file @@ -96,12 +29,10 @@ integTestCluster { setting 'xpack.security.authc.realms.kerberos.kerberos.krb.debug', 'true' setting 'xpack.security.authc.realms.kerberos.kerberos.remove_realm_name', 'false' - Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath() - String jvmArgsStr = " -Djava.security.krb5.conf=${krb5conf}" + " -Dsun.security.krb5.debug=true" - jvmArgs jvmArgsStr - Path esKeytab = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs") - .resolve("$httpPrincipal".replace('/', '_') + ".keytab").toAbsolutePath() - extraConfigFile("es.keytab", "${esKeytab}") + jvmArgs += " -Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa")}" + jvmArgs += " -Dsun.security.krb5.debug=true" + + extraConfigFile("es.keytab", project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "HTTP_localhost.keytab")) setupCommand 'setupTestAdmin', 
'bin/elasticsearch-users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser" @@ -119,6 +50,7 @@ integTestCluster { } +String realm = "BUILD.ELASTIC.CO" integTestRunner { Path peppaKeytab = Paths.get("${project.buildDir}", "generated-resources", "keytabs", "peppa.keytab") systemProperty 'test.userkt', "peppa@${realm}" @@ -126,16 +58,17 @@ integTestRunner { systemProperty 'test.userpwd', "george@${realm}" systemProperty 'test.userpwd.password', "dino" systemProperty 'tests.security.manager', 'true' - Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath() - List jvmargs = ["-Djava.security.krb5.conf=${krb5conf}","-Dsun.security.krb5.debug=true"] - jvmArgs jvmargs + jvmArgs([ + "-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa")}", + "-Dsun.security.krb5.debug=true" + ]) } -if (project.rootProject.vagrantSupported == false) { - integTest.enabled = false - testingConventions.enabled = false -} else { - project.sourceSets.test.output.dir(generatedResources) - integTestCluster.dependsOn krb5AddPrincipals, krb5kdcFixture, copyKeytabToGeneratedResources - integTest.finalizedBy project(':test:fixtures:krb5kdc-fixture').halt +def generatedResources = "$buildDir/generated-resources/keytabs" +task copyKeytabToGeneratedResources(type: Copy) { + from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab") + into generatedResources + dependsOn project(':test:fixtures:krb5kdc-fixture').postProcessFixture } +project.sourceSets.test.output.dir(generatedResources, builtBy:copyKeytabToGeneratedResources) + From 459715dba781b00e07ec1ad205e9bf3b5a6e9799 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 28 Mar 2019 11:29:29 -0400 Subject: [PATCH 02/63] Add randomScore function in script_score query (#40186) To make script_score query to have the same features as function_score query, we need to add randomScore function. This function produces different random scores on different index shards. It is also able to produce random scores based on the internal Lucene Document Ids. --- .../query-dsl/script-score-query.asciidoc | 60 +++---- .../painless/spi/org.elasticsearch.score.txt | 7 +- .../test/painless/80_script_score.yml | 55 ------- .../painless/85_script_score_random_score.yml | 146 ++++++++++++++++++ .../search/function/ScriptScoreFunction.java | 15 ++ .../ScriptScoreFunctionBuilder.java | 2 +- .../org/elasticsearch/script/ScoreScript.java | 72 +++++++++ .../script/ScoreScriptUtils.java | 58 ++++--- 8 files changed, 301 insertions(+), 114 deletions(-) create mode 100644 modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml diff --git a/docs/reference/query-dsl/script-score-query.asciidoc b/docs/reference/query-dsl/script-score-query.asciidoc index ee68d3e40fe13..56c4f7c41b8ee 100644 --- a/docs/reference/query-dsl/script-score-query.asciidoc +++ b/docs/reference/query-dsl/script-score-query.asciidoc @@ -182,60 +182,44 @@ different from the query's vector, 0 is used for missing dimensions in the calculations of vector functions. -[[random-functions]] -===== Random functions -There are two predefined ways to produce random values: -`randomNotReproducible` and `randomReproducible`. +[[random-score-function]] +===== Random score function +`random_score` function generates scores that are uniformly distributed +from 0 up to but not including 1. 
-
 
-`randomNotReproducible()` uses `java.util.Random` class
-to generate a random value of the type `long`.
-The generated values are not reproducible between requests' invocations.
+`randomScore` function has the following syntax:
+`randomScore(<seed>, <fieldName>)`.
+It has a required parameter - `seed` as an integer value,
+and an optional parameter - `fieldName` as a string value.
 
 [source,js]
 --------------------------------------------------
 "script" : {
-    "source" : "randomNotReproducible()"
+    "source" : "randomScore(100, '_seq_no')"
 }
 --------------------------------------------------
 // NOTCONSOLE
 
-
-`randomReproducible(String seedValue, int seed)` produces
-reproducible random values of type `long`. This function requires
-more computational time and memory than the non-reproducible version.
-
-A good candidate for the `seedValue` is document field values that
-are unique across documents and already pre-calculated and preloaded
-in the memory. For example, values of the document's `_seq_no` field
-is a good candidate, as documents on the same shard have unique values
-for the `_seq_no` field.
+If the `fieldName` parameter is omitted, the internal Lucene
+document ids will be used as a source of randomness. This is very efficient,
+but unfortunately not reproducible since documents might be renumbered
+by merges.
 
 [source,js]
 --------------------------------------------------
 "script" : {
-    "source" : "randomReproducible(Long.toString(doc['_seq_no'].value), 100)"
+    "source" : "randomScore(100)"
 }
 --------------------------------------------------
 // NOTCONSOLE
 
-A drawback of using `_seq_no` is that generated values change if
-documents are updated. Another drawback is not absolute uniqueness, as
-documents from different shards with the same sequence numbers
-generate the same random values.
-
-If you need random values to be distinct across different shards,
-you can use a field with unique values across shards,
-such as `_id`, but watch out for the memory usage as all
-these unique values need to be loaded into memory.
-
-[source,js]
---------------------------------------------------
-"script" : {
-    "source" : "randomReproducible(doc['_id'].value, 100)"
-}
---------------------------------------------------
-// NOTCONSOLE
+Note that documents that are within the same shard and have the
+same value for the field will get the same score, so it is usually desirable
+to use a field that has unique values for all documents across a shard.
+A good default choice might be to use the `_seq_no`
+field, whose only drawback is that scores will change if the document is
+updated since update operations also update the value of the `_seq_no` field.
 
 
 [[decay-functions]]
@@ -349,8 +333,8 @@ the following script:
 
 ===== `random_score`
 
-Use `randomReproducible` and `randomNotReproducible` functions
-as described in <<random-functions>>.
+Use `randomScore` function
+as described in <<random-score-function>>.
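
As background for this rename, the standalone sketch below illustrates the salted-hash scheme that the new `randomScore` implementation uses (see the `ScoreScriptUtils` changes later in this patch) to turn a seed, the index/shard identity, and a per-document value into a deterministic score in `[0.0, 1.0)`. The class and helper names here are illustrative stand-ins, not the actual Painless API, and a plain string hash stands in for the `StringHelper.murmurhash3_x86_32` call used by the real code.

```java
public class RandomScoreSketch {

    // Salt the user-supplied seed with the index name and shard id, as the patch
    // does, so different shards produce different (but reproducible) sequences.
    // The real code additionally runs the result through BitMixer.mix32().
    static int saltedSeed(String indexName, int shardId, int seed) {
        int salt = (indexName.hashCode() << 10) | shardId;
        return salt ^ seed;
    }

    // Hash a per-document value (e.g. its _seq_no rendered as a string) with the
    // salted seed, then keep only the low 24 bits: dividing by 2^24 maps the
    // result uniformly onto [0.0, 1.0), matching the documented range.
    static double score(String perDocValue, int saltedSeed) {
        int hash = (perDocValue + "#" + saltedSeed).hashCode(); // stand-in hash
        return (hash & 0x00FFFFFF) / (double) (1 << 24);
    }

    public static void main(String[] args) {
        int seed = saltedSeed("test", 0, 100);
        for (long seqNo = 0; seqNo < 3; seqNo++) {
            System.out.println(score(Long.toString(seqNo), seed)); // deterministic per seed/shard
        }
    }
}
```

The 24-bit mask is what guarantees the documented "from 0 up to but not including 1" range.
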
===== `field_value_factor` diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.score.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.score.txt index 3d7b29826c747..03ec9275aa8b7 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.score.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.score.txt @@ -19,11 +19,14 @@ # This file contains a whitelist for functions to be used in Score context +class org.elasticsearch.script.ScoreScript no_import { +} + static_import { double saturation(double, double) from_class org.elasticsearch.script.ScoreScriptUtils double sigmoid(double, double, double) from_class org.elasticsearch.script.ScoreScriptUtils - double randomReproducible(String, int) from_class org.elasticsearch.script.ScoreScriptUtils - double randomNotReproducible() bound_to org.elasticsearch.script.ScoreScriptUtils$RandomNotReproducible + double randomScore(org.elasticsearch.script.ScoreScript, int, String) bound_to org.elasticsearch.script.ScoreScriptUtils$RandomScoreField + double randomScore(org.elasticsearch.script.ScoreScript, int) bound_to org.elasticsearch.script.ScoreScriptUtils$RandomScoreDoc double decayGeoLinear(String, String, String, double, GeoPoint) bound_to org.elasticsearch.script.ScoreScriptUtils$DecayGeoLinear double decayGeoExp(String, String, String, double, GeoPoint) bound_to org.elasticsearch.script.ScoreScriptUtils$DecayGeoExp double decayGeoGauss(String, String, String, double, GeoPoint) bound_to org.elasticsearch.script.ScoreScriptUtils$DecayGeoGauss diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/80_script_score.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/80_script_score.yml index a3135777c952c..cf55810058d92 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/80_script_score.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/80_script_score.yml @@ -72,61 +72,6 @@ setup: - match: { hits.hits.1._id: d2 } - match: { hits.hits.2._id: d1 } ---- -"Random functions": - - do: - indices.create: - index: test - body: - settings: - number_of_shards: 2 - mappings: - properties: - f1: - type: keyword - - do: - index: - index: test - id: 1 - body: {"f1": "v1"} - - do: - index: - index: test - id: 2 - body: {"f1": "v2"} - - do: - index: - index: test - id: 3 - body: {"f1": "v3"} - - - do: - indices.refresh: {} - - - do: - search: - rest_total_hits_as_int: true - index: test - body: - query: - script_score: - query: {match_all: {} } - script: - source: "randomReproducible(Long.toString(doc['_seq_no'].value), 100)" - - match: { hits.total: 3 } - - - do: - search: - rest_total_hits_as_int: true - index: test - body: - query: - script_score: - query: {match_all: {} } - script: - source: "randomNotReproducible()" - - match: { hits.total: 3 } - --- "Decay geo functions": - do: diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml new file mode 100644 index 0000000000000..2879d50fedebc --- /dev/null +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/85_script_score_random_score.yml @@ -0,0 +1,146 @@ +# Integration tests for ScriptScoreQuery using Painless + +setup: +- skip: + 
version: " - 7.99.99" # correct to 7.09.99 after backporting to 7.1 + reason: "random score function of script score was added in 7.1" + +--- +"Random score function with _seq_no field": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 2 + mappings: + properties: + f1: + type: keyword + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test"}}' + - '{"f1": "v0"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v1"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v2"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v3"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v4"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v5"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v6"}' + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + script_score: + query: {match_all: {} } + script: + source: "randomScore(100, '_seq_no')" + # stash ids to check for reproducibility of ranking + - set: { hits.hits.0._id: id0 } + - set: { hits.hits.1._id: id1 } + - set: { hits.hits.2._id: id2 } + - set: { hits.hits.3._id: id3 } + - set: { hits.hits.4._id: id4 } + - set: { hits.hits.5._id: id5 } + - set: { hits.hits.6._id: id6 } + + # check that ranking is reproducible + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + script_score: + query: {match_all: {} } + script: + source: "randomScore(100, '_seq_no')" + - match: { hits.hits.0._id: $id0 } + - match: { hits.hits.1._id: $id1 } + - match: { hits.hits.2._id: $id2 } + - match: { hits.hits.3._id: $id3 } + - match: { hits.hits.4._id: $id4 } + - match: { hits.hits.5._id: $id5 } + - match: { hits.hits.6._id: $id6 } + +--- +"Random score function with internal doc Ids": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + mappings: + properties: + f1: + type: keyword + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test"}}' + - '{"f1": "v0"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v1"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v2"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v3"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v4"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v5"}' + - '{"index": {"_index": "test"}}' + - '{"f1": "v6"}' + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + script_score: + query: {match_all: {} } + script: + source: "randomScore(100)" + # stash ids to check for reproducibility of ranking + - set: { hits.hits.0._id: id0 } + - set: { hits.hits.1._id: id1 } + - set: { hits.hits.2._id: id2 } + - set: { hits.hits.3._id: id3 } + - set: { hits.hits.4._id: id4 } + - set: { hits.hits.5._id: id5 } + - set: { hits.hits.6._id: id6 } + + # check that ranking is reproducible + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + script_score: + query: {match_all: {} } + script: + source: "randomScore(100)" + - match: { hits.hits.0._id: $id0 } + - match: { hits.hits.1._id: $id1 } + - match: { hits.hits.2._id: $id2 } + - match: { hits.hits.3._id: $id3 } + - match: { hits.hits.4._id: $id4 } + - match: { hits.hits.5._id: $id5 } + - match: { hits.hits.6._id: $id6 } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 8e51bc5951d59..960df44a62514 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java 
+++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -50,11 +50,24 @@ public float score() { private final ScoreScript.LeafFactory script; + private final int shardId; + private final String indexName; + public ScriptScoreFunction(Script sScript, ScoreScript.LeafFactory script) { super(CombineFunction.REPLACE); this.sScript = sScript; this.script = script; + this.indexName = null; + this.shardId = -1; + } + + public ScriptScoreFunction(Script sScript, ScoreScript.LeafFactory script, String indexName, int shardId) { + super(CombineFunction.REPLACE); + this.sScript = sScript; + this.script = script; + this.indexName = indexName; + this.shardId = shardId; } @Override @@ -62,6 +75,8 @@ public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOEx final ScoreScript leafScript = script.newInstance(ctx); final CannedScorer scorer = new CannedScorer(); leafScript.setScorer(scorer); + leafScript._setIndexName(indexName); + leafScript._setShard(shardId); return new LeafScoreFunction() { @Override public double score(int docId, float subQueryScore) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java index a860bd19d7c5f..accfd2f656999 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java @@ -94,7 +94,7 @@ protected ScoreFunction doToFunction(QueryShardContext context) { try { ScoreScript.Factory factory = context.getScriptService().compile(script, ScoreScript.CONTEXT); ScoreScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup()); - return new ScriptScoreFunction(script, searchScript); + return new ScriptScoreFunction(script, searchScript, context.index().getName(), context.getShardId()); } catch (Exception e) { throw new QueryShardException(context, "script_score: the script could not be loaded", e); } diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index 6ac5935826bf7..f31af4c008c74 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -62,6 +62,11 @@ public abstract class ScoreScript { private DoubleSupplier scoreSupplier = () -> 0.0; + private final int docBase; + private int docId; + private int shardId = -1; + private String indexName = null; + public ScoreScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { // null check needed b/c of expression engine subclass if (lookup == null) { @@ -69,11 +74,13 @@ public ScoreScript(Map params, SearchLookup lookup, LeafReaderCo assert leafContext == null; this.params = null; this.leafLookup = null; + this.docBase = 0; } else { this.leafLookup = lookup.getLeafSearchLookup(leafContext); params = new HashMap<>(params); params.putAll(leafLookup.asMap()); this.params = new DeprecationMap(params, DEPRECATIONS, "score-script"); + this.docBase = leafContext.docBase; } } @@ -91,6 +98,7 @@ public final Map> getDoc() { /** Set the current document to run the script on next. 
*/ public void setDocument(int docid) { + this.docId = docid; leafLookup.setDocument(docid); } @@ -104,10 +112,74 @@ public void setScorer(Scorable scorer) { }; } + /** + * Accessed as _score in the painless script + * @return the score of the inner query + */ public double get_score() { return scoreSupplier.getAsDouble(); } + + /** + * Starting a name with underscore, so that the user cannot access this function directly through a script + * It is only used within predefined painless functions. + * @return the internal document ID + */ + public int _getDocId() { + return docId; + } + + /** + * Starting a name with underscore, so that the user cannot access this function directly through a script + * It is only used within predefined painless functions. + * @return the internal document ID with the base + */ + public int _getDocBaseId() { + return docBase + docId; + } + + /** + * Starting a name with underscore, so that the user cannot access this function directly through a script + * It is only used within predefined painless functions. + * @return shard id or throws an exception if shard is not set up for this script instance + */ + public int _getShardId() { + if (shardId > -1) { + return shardId; + } else { + throw new IllegalArgumentException("shard id can not be looked up!"); + } + } + + /** + * Starting a name with underscore, so that the user cannot access this function directly through a script + * It is only used within predefined painless functions. + * @return index name or throws an exception if the index name is not set up for this script instance + */ + public String _getIndex() { + if (indexName != null) { + return indexName; + } else { + throw new IllegalArgumentException("index name can not be looked up!"); + } + } + + /** + * Starting a name with underscore, so that the user cannot access this function directly through a script + */ + public void _setShard(int shardId) { + this.shardId = shardId; + } + + /** + * Starting a name with underscore, so that the user cannot access this function directly through a script + */ + public void _setIndexName(String indexName) { + this.indexName = indexName; + } + + /** A factory to construct {@link ScoreScript} instances. */ public interface LeafFactory { diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java index 273b8fcf8559d..c7d6e889397ff 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java @@ -21,22 +21,20 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; -import org.elasticsearch.common.Randomness; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.DateFieldMapper; import java.time.ZoneId; -import java.util.Random; -/** - * ScoringScriptImpl can be used as {@link ScoreScript} - * to run a previously compiled Painless script. 
- */ +import static com.carrotsearch.hppc.BitMixer.mix32; + public final class ScoreScriptUtils { /****** STATIC FUNCTIONS that can be used by users for score calculations **/ @@ -53,26 +51,50 @@ public static double sigmoid(double value, double k, double a){ return Math.pow(value,a) / (Math.pow(k,a) + Math.pow(value,a)); } + // random score based on the documents' values of the given field + public static final class RandomScoreField { + private final ScoreScript scoreScript; + private final ScriptDocValues docValues; + private final int saltedSeed; - // reproducible random - public static double randomReproducible(String seedValue, int seed) { - int hash = StringHelper.murmurhash3_x86_32(new BytesRef(seedValue), seed); - return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0 - } - // not reproducible random - public static final class RandomNotReproducible { - private final Random rnd; + public RandomScoreField(ScoreScript scoreScript, int seed, String fieldName) { + this.scoreScript = scoreScript; + this.docValues = scoreScript.getDoc().get(fieldName); + int salt = (scoreScript._getIndex().hashCode() << 10) | scoreScript._getShardId(); + this.saltedSeed = mix32(salt ^ seed); - public RandomNotReproducible() { - this.rnd = Randomness.get(); } - public double randomNotReproducible() { - return rnd.nextDouble(); + public double randomScore() { + try { + docValues.setNextDocId(scoreScript._getDocId()); + String seedValue = String.valueOf(docValues.get(0)); + int hash = StringHelper.murmurhash3_x86_32(new BytesRef(seedValue), saltedSeed); + return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0 + } catch (Exception e) { + throw ExceptionsHelper.convertToElastic(e); + } } } + // random score based on the internal Lucene document Ids + public static final class RandomScoreDoc { + private final ScoreScript scoreScript; + private final int saltedSeed; + + public RandomScoreDoc(ScoreScript scoreScript, int seed) { + this.scoreScript = scoreScript; + int salt = (scoreScript._getIndex().hashCode() << 10) | scoreScript._getShardId(); + this.saltedSeed = mix32(salt ^ seed); + } + + public double randomScore() { + String seedValue = Integer.toString(scoreScript._getDocBaseId()); + int hash = StringHelper.murmurhash3_x86_32(new BytesRef(seedValue), saltedSeed); + return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0 + } + } // **** Decay functions on geo field public static final class DecayGeoLinear { From f732a04b863c6baeb252387b6e7d5935f23ebd30 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 28 Mar 2019 17:35:33 +0200 Subject: [PATCH 03/63] Disable integTest when Docker is not available (#40585) * Disable integTest when Docker is not available Resolves: #40549 --- x-pack/qa/saml-idp-tests/build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle index 44a28278636a9..7b76321fe9d4f 100644 --- a/x-pack/qa/saml-idp-tests/build.gradle +++ b/x-pack/qa/saml-idp-tests/build.gradle @@ -38,6 +38,9 @@ task setupPorts { idpMetaFile.write(content.toString(), "UTF-8") } } +// Don't attempt to get ephemeral ports when Docker is not available +setupPorts.onlyIf { idpFixtureProject.postProcessFixture.enabled } + integTestCluster.dependsOn setupPorts integTestCluster { From c6b9868192f1fb96a50e5d1c823b3302080556fd Mon Sep 17 00:00:00 2001 From: Luca Cavanna 
Date: Thu, 28 Mar 2019 16:53:20 +0100 Subject: [PATCH 04/63] Mute DataFrameAuditorIT#testAuditorWritesAudits Relates to #40594 --- .../xpack/dataframe/integration/DataFrameAuditorIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java index 750faf8dade51..2367e255cd9ba 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java @@ -49,6 +49,7 @@ public void createIndexes() throws IOException { setupUser(TEST_USER_NAME, Arrays.asList("data_frame_transforms_admin", DATA_ACCESS_ROLE)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40594") @SuppressWarnings("unchecked") public void testAuditorWritesAudits() throws Exception { String transformId = "simplePivotForAudit"; From fc404e59f5245411cc3ca702d3064b5436a88198 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 28 Mar 2019 09:41:56 -0700 Subject: [PATCH 05/63] [DOCS] Adds anchors for ruby client (#39867) --- docs/ruby/client.asciidoc | 1 + docs/ruby/copyright.asciidoc | 1 + docs/ruby/model.asciidoc | 1 + docs/ruby/persistence.asciidoc | 1 + docs/ruby/rails.asciidoc | 1 + 5 files changed, 5 insertions(+) diff --git a/docs/ruby/client.asciidoc b/docs/ruby/client.asciidoc index 2037ae1a0b280..074c77d41b03b 100644 --- a/docs/ruby/client.asciidoc +++ b/docs/ruby/client.asciidoc @@ -1,3 +1,4 @@ +[[ruby_client]] == The Ruby Client The `elasticsearch` http://rubygems.org/gems/elasticsearch[Rubygem] provides a low-level client diff --git a/docs/ruby/copyright.asciidoc b/docs/ruby/copyright.asciidoc index 3747cc572e40f..8a84be27636f4 100644 --- a/docs/ruby/copyright.asciidoc +++ b/docs/ruby/copyright.asciidoc @@ -1,3 +1,4 @@ +[[copyright]] == Copyright and License This software is Copyright (c) 2013-2018 by Elasticsearch BV. diff --git a/docs/ruby/model.asciidoc b/docs/ruby/model.asciidoc index 0b0be45708fa8..62339bb239149 100644 --- a/docs/ruby/model.asciidoc +++ b/docs/ruby/model.asciidoc @@ -1,3 +1,4 @@ +[[activemodel_activerecord]] == ActiveModel / ActiveRecord The `elasticsearch-model` http://rubygems.org/gems/elasticsearch-model[Rubygem] diff --git a/docs/ruby/persistence.asciidoc b/docs/ruby/persistence.asciidoc index 7d361978ee703..5306dae47c661 100644 --- a/docs/ruby/persistence.asciidoc +++ b/docs/ruby/persistence.asciidoc @@ -1,3 +1,4 @@ +[[persistence]] == Persistence The `elasticsearch-persistence` http://rubygems.org/gems/elasticsearch-persistence[Rubygem] diff --git a/docs/ruby/rails.asciidoc b/docs/ruby/rails.asciidoc index 1fef3f42381a6..213258c7e2266 100644 --- a/docs/ruby/rails.asciidoc +++ b/docs/ruby/rails.asciidoc @@ -1,3 +1,4 @@ +[[ruby_on_rails]] == Ruby On Rails The `elasticsearch-rails` http://rubygems.org/gems/elasticsearch-rails[Rubygem] From 9ff6bbce1d78812bcc254dbd0dfb6583f7d14221 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 28 Mar 2019 10:42:28 -0600 Subject: [PATCH 06/63] Handle null retention leases in WaitForNoFollowersStep (#40477) In some cases the retention leases can return null, causing a `NullPointerException` when waiting for no followers. This wraps those so that no NPE is thrown. 
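
Conceptually, the fix lifts each nullable step of the stats chain into an
`Optional`, so a missing value short-circuits to "not a leader index" instead
of throwing. Below is a minimal self-contained sketch of that pattern, using
hypothetical stand-in record types rather than the real
`ShardStats`/`RetentionLeaseStats` classes:

```java
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;

public class NullSafeLeases {
    // Stand-in types: any of the nested values may legitimately be null.
    record Lease(String source) {}
    record Leases(List<Lease> leases) {}          // leases() may be null
    record LeaseStats(Leases retentionLeases) {}  // retentionLeases() may be null

    static final String CCR_LEASE_KEY = "ccr";

    static boolean isLeaderIndex(Stream<LeaseStats> shardStats) {
        return shardStats
            .map(Optional::ofNullable)                     // the stats object itself may be null
            .map(o -> o.map(LeaseStats::retentionLeases))  // Optional.map yields empty on null
            .map(o -> o.map(Leases::leases))
            .anyMatch(o -> o.isPresent()
                && o.get().stream().anyMatch(l -> CCR_LEASE_KEY.equals(l.source())));
    }

    public static void main(String[] args) {
        // A null retention-lease entry no longer throws:
        System.out.println(isLeaderIndex(Stream.of(new LeaseStats(null))));               // false
        System.out.println(isLeaderIndex(
            Stream.of(new LeaseStats(new Leases(List.of(new Lease("ccr")))))));           // true
    }
}
```

The actual change (diff below) applies the same `Optional::ofNullable` chain
over `ShardStats::getRetentionLeaseStats`.
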
Here is an example failure: ``` [2019-03-26T09:24:01,368][ERROR][o.e.x.i.IndexLifecycleRunner] [node-0] policy [deletePolicy] for index [ilm-00001] failed on step [{"phase":"delete","action":"delete","name":"wait-for-shard-history-leases"}]. Moving to ERROR step java.lang.NullPointerException: null at org.elasticsearch.xpack.core.indexlifecycle.WaitForNoFollowersStep.lambda$evaluateCondition$0(WaitForNoFollowersStep.java:60) ~[?:?] at java.util.stream.ReferencePipeline$7$1.accept(ReferencePipeline.java:267) ~[?:1.8.0_191] at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193) ~[?:1.8.0_191] at java.util.Spliterators$ArraySpliterator.tryAdvance(Spliterators.java:958) ~[?:1.8.0_191] at java.util.stream.ReferencePipeline.forEachWithCancel(ReferencePipeline.java:126) ~[?:1.8.0_191] at java.util.stream.AbstractPipeline.copyIntoWithCancel(AbstractPipeline.java:498) ~[?:1.8.0_191] at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485) ~[?:1.8.0_191] at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471) ~[?:1.8.0_191] at java.util.stream.MatchOps$MatchOp.evaluateSequential(MatchOps.java:230) ~[?:1.8.0_191] at java.util.stream.MatchOps$MatchOp.evaluateSequential(MatchOps.java:196) ~[?:1.8.0_191] at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:1.8.0_191] at java.util.stream.ReferencePipeline.anyMatch(ReferencePipeline.java:449) ~[?:1.8.0_191] at org.elasticsearch.xpack.core.indexlifecycle.WaitForNoFollowersStep.lambda$evaluateCondition$2(WaitForNoFollowersStep.java:61) ~[?:?] at org.elasticsearch.action.ActionListener$1.onResponse(ActionListener.java:62) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.ContextPreservingActionListener.onResponse(ContextPreservingActionListener.java:43) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.TransportAction$1.onResponse(TransportAction.java:68) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.TransportAction$1.onResponse(TransportAction.java:64) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.ContextPreservingActionListener.onResponse(ContextPreservingActionListener.java:43) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction$AsyncAction.onCompletion(TransportBroadcastByNodeAction.java:383) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction$AsyncAction.onNodeResponse(TransportBroadcastByNodeAction.java:352) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction$AsyncAction$1.handleResponse(TransportBroadcastByNodeAction.java:324) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction$AsyncAction$1.handleResponse(TransportBroadcastByNodeAction.java:314) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.transport.TransportService$ContextRestoreResponseHandler.handleResponse(TransportService.java:1095) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] at org.elasticsearch.transport.TransportService$DirectResponseChannel.processResponse(TransportService.java:1176) ~[elasticsearch-8.0.0-SNAPSHOT.jar:8.0.0-SNAPSHOT] ... 
``` --- .../WaitForNoFollowersStep.java | 9 +++-- .../WaitForNoFollowersStepTests.java | 36 +++++++++++++++++++ 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStep.java index 3cfaeba048d5f..958120b99b879 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStep.java @@ -20,7 +20,9 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Objects; +import java.util.Optional; /** * A step that waits until the index it's used on is no longer a leader index. @@ -57,8 +59,11 @@ public void evaluateCondition(IndexMetaData indexMetaData, Listener listener) { boolean isCurrentlyLeaderIndex = Arrays.stream(indexStats.getShards()) .map(ShardStats::getRetentionLeaseStats) - .flatMap(retentionLeaseStats -> retentionLeaseStats.retentionLeases().leases().stream()) - .anyMatch(lease -> CCR_LEASE_KEY.equals(lease.source())); + .map(Optional::ofNullable) + .map(o -> o.flatMap(stats -> Optional.ofNullable(stats.retentionLeases()))) + .map(o -> o.flatMap(leases -> Optional.ofNullable(leases.leases()))) + .map(o -> o.map(Collection::stream)) + .anyMatch(lease -> lease.isPresent() && lease.get().anyMatch(l -> CCR_LEASE_KEY.equals(l.source()))); if (isCurrentlyLeaderIndex) { listener.onResponse(false, new Info()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStepTests.java index f1f3c053e2345..6953455489d1a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/WaitForNoFollowersStepTests.java @@ -132,6 +132,42 @@ public void onFailure(Exception e) { containsString("this index is a leader index; waiting for all following indices to cease following before proceeding")); } + public void testNoShardStats() { + WaitForNoFollowersStep step = createRandomInstance(); + + String indexName = randomAlphaOfLengthBetween(5,10); + + int numberOfShards = randomIntBetween(1, 100); + final IndexMetaData indexMetaData = IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT)) + .numberOfShards(numberOfShards) + .numberOfReplicas(randomIntBetween(1, 10)) + .build(); + + ShardStats sStats = new ShardStats(null, mockShardPath(), null, null, null, null); + ShardStats[] shardStats = new ShardStats[1]; + shardStats[0] = sStats; + mockIndexStatsCall(step.getClient(), indexName, new IndexStats(indexName, "uuid", shardStats)); + + final SetOnce<Boolean> conditionMetHolder = new SetOnce<>(); + final SetOnce<ToXContentObject> stepInfoHolder = new SetOnce<>(); + step.evaluateCondition(indexMetaData, new AsyncWaitStep.Listener() { + @Override + public void onResponse(boolean conditionMet, ToXContentObject informationContext) { + conditionMetHolder.set(conditionMet); + stepInfoHolder.set(informationContext); + } + + @Override + public void onFailure(Exception e) { + fail("onFailure should not be called in this test, called with exception: " + e.getMessage()); + } + }); + + assertTrue(conditionMetHolder.get());
assertNull(stepInfoHolder.get()); + } + public void testFailure() { WaitForNoFollowersStep step = createRandomInstance(); From bd9e9b3acf0b08ba3bc1c647fbffc22e2d305002 Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Thu, 28 Mar 2019 18:55:31 +0100 Subject: [PATCH 07/63] Geo Point parse error fix (#40447) When geo point parsing threw a parse exception, it did not consume remaining tokens from the parser. This in turn meant that indexing documents with malformed geo points into mappings with ignore_malformed=true would fail in some cases, since DocumentParser expects geo_point parsing to end on the END_OBJECT token. Related to #17617 --- .../common/xcontent/XContentSubParser.java | 6 +- .../common/xcontent/XContentParserTests.java | 48 +++++++- .../elasticsearch/common/geo/GeoUtils.java | 104 +++++++++--------- .../mapper/GeoPointFieldMapperTests.java | 10 ++ .../index/search/geo/GeoUtilsTests.java | 32 ++++++ 5 files changed, 145 insertions(+), 55 deletions(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java index e02f9f176246e..adcbf6ef1bee0 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentSubParser.java @@ -25,7 +25,7 @@ import java.util.Map; /** - * Wrapper for a XContentParser that makes a single object to look like a complete document. + * Wrapper for a XContentParser that makes a single object/array look like a complete document. * * The wrapper prevents the parsing logic to consume tokens outside of the wrapped object as well * as skipping to the end of the object in case of a parsing error. 
The wrapper is intended to be @@ -39,8 +39,8 @@ public class XContentSubParser implements XContentParser { public XContentSubParser(XContentParser parser) { this.parser = parser; - if (parser.currentToken() != Token.START_OBJECT) { - throw new IllegalStateException("The sub parser has to be created on the start of an object"); + if (parser.currentToken() != Token.START_OBJECT && parser.currentToken() != Token.START_ARRAY) { + throw new IllegalStateException("The sub parser has to be created on the start of an object or array"); } level = 1; } diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java index 5dbe7be40f312..e98f1e3d58510 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -329,7 +329,7 @@ public void testNestedMapInList() throws IOException { } } - public void testSubParser() throws IOException { + public void testSubParserObject() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); int numberOfTokens; numberOfTokens = generateRandomObjectForMarking(builder); @@ -354,6 +354,7 @@ public void testSubParser() throws IOException { // And sometimes skipping children subParser.skipChildren(); } + } finally { assertFalse(subParser.isClosed()); subParser.close(); @@ -367,6 +368,49 @@ public void testSubParser() throws IOException { } } + public void testSubParserArray() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + int numberOfArrayElements = randomInt(10); + builder.startObject(); + builder.field("array"); + builder.startArray(); + int numberOfTokens = 0; + for (int i = 0; i < numberOfArrayElements; ++i) { + numberOfTokens += generateRandomObjectForMarking(builder); + } + builder.endArray(); + builder.endObject(); + + String content = Strings.toString(builder); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // array field + assertEquals("array", parser.currentName()); + assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); // [ + XContentParser subParser = new XContentSubParser(parser); + try { + int tokensToSkip = randomInt(numberOfTokens - 1); + for (int i = 0; i < tokensToSkip; i++) { + // Simulate incomplete parsing + assertNotNull(subParser.nextToken()); + } + if (randomBoolean()) { + // And sometimes skipping children + subParser.skipChildren(); + } + + } finally { + assertFalse(subParser.isClosed()); + subParser.close(); + assertTrue(subParser.isClosed()); + } + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + } + public void testCreateSubParserAtAWrongPlace() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); generateRandomObjectForMarking(builder); @@ -377,7 +421,7 @@ public void testCreateSubParserAtAWrongPlace() throws IOException { assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // first field assertEquals("first_field", parser.currentName()); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> new XContentSubParser(parser)); - assertEquals("The 
sub parser has to be created on the start of an object", exception.getMessage()); + assertEquals("The sub parser has to be created on the start of an object or array", exception.getMessage()); } } diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index a45667b908d74..6dcaaaa7d6a29 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.common.xcontent.XContentSubParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.FieldData; @@ -435,51 +436,52 @@ public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point, fina NumberFormatException numberFormatException = null; if(parser.currentToken() == Token.START_OBJECT) { - while(parser.nextToken() != Token.END_OBJECT) { - if(parser.currentToken() == Token.FIELD_NAME) { - String field = parser.currentName(); - if(LATITUDE.equals(field)) { - parser.nextToken(); - switch (parser.currentToken()) { - case VALUE_NUMBER: - case VALUE_STRING: - try { - lat = parser.doubleValue(true); - } catch (NumberFormatException e) { - numberFormatException = e; - } - break; - default: - throw new ElasticsearchParseException("latitude must be a number"); - } - } else if (LONGITUDE.equals(field)) { - parser.nextToken(); - switch (parser.currentToken()) { - case VALUE_NUMBER: - case VALUE_STRING: - try { - lon = parser.doubleValue(true); - } catch (NumberFormatException e) { - numberFormatException = e; - } - break; - default: - throw new ElasticsearchParseException("longitude must be a number"); - } - } else if (GEOHASH.equals(field)) { - if(parser.nextToken() == Token.VALUE_STRING) { - geohash = parser.text(); + try (XContentSubParser subParser = new XContentSubParser(parser)) { + while (subParser.nextToken() != Token.END_OBJECT) { + if (subParser.currentToken() == Token.FIELD_NAME) { + String field = subParser.currentName(); + if (LATITUDE.equals(field)) { + subParser.nextToken(); + switch (subParser.currentToken()) { + case VALUE_NUMBER: + case VALUE_STRING: + try { + lat = subParser.doubleValue(true); + } catch (NumberFormatException e) { + numberFormatException = e; + } + break; + default: + throw new ElasticsearchParseException("latitude must be a number"); + } + } else if (LONGITUDE.equals(field)) { + subParser.nextToken(); + switch (subParser.currentToken()) { + case VALUE_NUMBER: + case VALUE_STRING: + try { + lon = subParser.doubleValue(true); + } catch (NumberFormatException e) { + numberFormatException = e; + } + break; + default: + throw new ElasticsearchParseException("longitude must be a number"); + } + } else if (GEOHASH.equals(field)) { + if (subParser.nextToken() == Token.VALUE_STRING) { + geohash = subParser.text(); + } else { + throw new ElasticsearchParseException("geohash must be a string"); + } } else { - throw new ElasticsearchParseException("geohash must be a string"); + throw new ElasticsearchParseException("field must be either [{}], [{}] or [{}]", LATITUDE, LONGITUDE, GEOHASH); } } else { - throw new ElasticsearchParseException("field must be either [{}], [{}] or [{}]", LATITUDE, LONGITUDE, GEOHASH); + throw 
new ElasticsearchParseException("token [{}] not allowed", subParser.currentToken()); } - } else { - throw new ElasticsearchParseException("token [{}] not allowed", parser.currentToken()); } } - if (geohash != null) { if(!Double.isNaN(lat) || !Double.isNaN(lon)) { throw new ElasticsearchParseException("field must be either lat/lon or geohash"); @@ -498,19 +500,21 @@ public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point, fina } } else if(parser.currentToken() == Token.START_ARRAY) { - int element = 0; - while(parser.nextToken() != Token.END_ARRAY) { - if(parser.currentToken() == Token.VALUE_NUMBER) { - element++; - if(element == 1) { - lon = parser.doubleValue(); - } else if(element == 2) { - lat = parser.doubleValue(); + try (XContentSubParser subParser = new XContentSubParser(parser)) { + int element = 0; + while (subParser.nextToken() != Token.END_ARRAY) { + if (subParser.currentToken() == Token.VALUE_NUMBER) { + element++; + if (element == 1) { + lon = subParser.doubleValue(); + } else if (element == 2) { + lat = subParser.doubleValue(); + } else { + GeoPoint.assertZValue(ignoreZValue, subParser.doubleValue()); + } } else { - GeoPoint.assertZValue(ignoreZValue, parser.doubleValue()); + throw new ElasticsearchParseException("numeric value expected"); } - } else { - throw new ElasticsearchParseException("numeric value expected"); } } return point.reset(lat, lon); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index f5597ecb1f443..2142fca565c9b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -523,5 +523,15 @@ public void testInvalidGeopointValuesIgnored() throws Exception { BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("location", "NaN,12").endObject() ), XContentType.JSON)).rootDoc().getField("location"), nullValue()); + + assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1", + BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().startObject("location").nullField("lat").field("lon", 1).endObject().endObject() + ), XContentType.JSON)).rootDoc().getField("location"), nullValue()); + + assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1", + BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject().startObject("location").nullField("lat").nullField("lon").endObject().endObject() + ), XContentType.JSON)).rootDoc().getField("location"), nullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index ee916dd4c47dd..1a85e29f02090 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -397,6 +397,8 @@ public void testParseGeoPoint() throws IOException { parser.nextToken(); GeoPoint point = GeoUtils.parseGeoPoint(parser); assertThat(point, equalTo(new GeoPoint(lat, lon))); + assertThat(parser.currentToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } json = jsonBuilder().startObject().field("lat", String.valueOf(lat)).field("lon", String.valueOf(lon)).endObject(); try (XContentParser parser = createParser(json)) { @@ -438,6 +440,21 @@ public void 
testParseGeoPointStringZValueError() throws IOException { } } + public void testParseGeoPointArrayZValueError() throws IOException { + double lat = randomDouble() * 180 - 90 + randomIntBetween(-1000, 1000) * 180; + double lon = randomDouble() * 360 - 180 + randomIntBetween(-1000, 1000) * 360; + double alt = randomDouble() * 1000; + XContentBuilder json = jsonBuilder().startArray().value(lat).value(lon).value(alt).endArray(); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, + () -> GeoUtils.parseGeoPoint(parser, new GeoPoint(), false)); + assertThat(e.getMessage(), containsString("but [ignore_z_value] parameter is [false]")); + assertThat(parser.currentToken(), is(Token.END_ARRAY)); + assertNull(parser.nextToken()); + } + } + public void testParseGeoPointGeohash() throws IOException { for (int i = 0; i < 100; i++) { int geoHashLength = randomIntBetween(1, GeoHashUtils.PRECISION); @@ -451,6 +468,8 @@ public void testParseGeoPointGeohash() throws IOException { GeoPoint point = GeoUtils.parseGeoPoint(parser); assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); + assertThat(parser.currentToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); try (XContentParser parser = createParser(json)) { @@ -470,6 +489,8 @@ public void testParseGeoPointGeohashWrongType() throws IOException { parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); assertThat(e.getMessage(), containsString("geohash must be a string")); + assertThat(parser.currentToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } } @@ -480,6 +501,8 @@ public void testParseGeoPointLatNoLon() throws IOException { parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); assertThat(e.getMessage(), is("field [lon] missing")); + assertThat(parser.currentToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } } @@ -490,6 +513,8 @@ public void testParseGeoPointLonNoLat() throws IOException { parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); assertThat(e.getMessage(), is("field [lat] missing")); + assertThat(parser.currentToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } } @@ -500,6 +525,8 @@ public void testParseGeoPointLonWrongType() throws IOException { parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); assertThat(e.getMessage(), is("longitude must be a number")); + assertThat(parser.currentToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } } @@ -510,6 +537,8 @@ public void testParseGeoPointLatWrongType() throws IOException { parser.nextToken(); Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); assertThat(e.getMessage(), is("latitude must be a number")); + assertThat(parser.currentToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } } @@ -578,6 +607,9 @@ public void testParseGeoPointArrayWrongType() throws IOException { } Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); 
assertThat(e.getMessage(), is("numeric value expected")); + assertThat(parser.currentToken(), is(Token.END_ARRAY)); + assertThat(parser.nextToken(), is(Token.END_OBJECT)); + assertNull(parser.nextToken()); } } From 3fffae6714fe8054a4d0fda3c97b8bb37e857bee Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Thu, 28 Mar 2019 12:06:18 -0600 Subject: [PATCH 08/63] Remove with(out)-system-key tests (#40547) This change removes the variants of the rolling upgrade and full cluster restart tests that use or do not use a system key. These tests were added during 5.x when the system key was still used for security and now the system key is only used as the watcher encryption key so duplicating rolling upgrade and full cluster restarts is not needed. The change here removes the subprojects for testing these scenarios and defaults to always run with the watcher sensitive values encrypted for these tests. --- x-pack/qa/full-cluster-restart/build.gradle | 275 +++++-------- .../with-system-key/build.gradle | 0 .../without-system-key/build.gradle | 0 x-pack/qa/rolling-upgrade/build.gradle | 388 +++++++----------- .../with-system-key/build.gradle | 1 - .../without-system-key/build.gradle | 1 - 6 files changed, 262 insertions(+), 403 deletions(-) delete mode 100644 x-pack/qa/full-cluster-restart/with-system-key/build.gradle delete mode 100644 x-pack/qa/full-cluster-restart/without-system-key/build.gradle delete mode 100644 x-pack/qa/rolling-upgrade/with-system-key/build.gradle delete mode 100644 x-pack/qa/rolling-upgrade/without-system-key/build.gradle diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 4c2ecd2b7b4ca..40dca76abc913 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -3,10 +3,10 @@ import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.Version import java.nio.charset.StandardCharsets -import java.util.regex.Matcher // Apply the java plugin to this project so the sources can be edited in an IDE -apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.standalone-test' + unitTest.enabled = false dependencies { @@ -70,8 +70,6 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> return tmpFile.exists() } -Project mainProject = project - String coreFullClusterRestartPath = project(':qa:full-cluster-restart').projectDir.toPath().resolve('src/test/java').toString() sourceSets { test { @@ -89,224 +87,157 @@ forbiddenPatterns { exclude '**/system_key' } -// tests are pushed down to subprojects -testingConventions.enabled = false - -/** - * Subdirectories of this project are test rolling upgrades with various - * configuration options based on their name. 
- */ -subprojects { - Matcher m = project.name =~ /with(out)?-system-key/ - if (false == m.matches()) { - throw new InvalidUserDataException("Invalid project name [${project.name}]") - } - boolean withSystemKey = m.group(1) == null - - apply plugin: 'elasticsearch.standalone-test' +String outputDir = "${buildDir}/generated-resources/${project.name}" - // Use resources from the rolling-upgrade project in subdirectories - sourceSets { - test { - java { - srcDirs = ["${mainProject.projectDir}/src/test/java", coreFullClusterRestartPath] - } - resources { - srcDirs = ["${mainProject.projectDir}/src/test/resources"] - } - } - } - - licenseHeaders { - approvedLicenses << 'Apache' - } - - forbiddenPatterns { - exclude '**/system_key' - } - - String outputDir = "${buildDir}/generated-resources/${project.name}" - - // This is a top level task which we will add dependencies to below. - // It is a single task that can be used to backcompat tests against all versions. - task bwcTest { +// This is a top level task which we will add dependencies to below. +// It is a single task that can be used to backcompat tests against all versions. +task bwcTest { description = 'Runs backwards compatibility tests.' group = 'verification' - } +} - String output = "${buildDir}/generated-resources/${project.name}" - task copyTestNodeKeyMaterial(type: Copy) { +task copyTestNodeKeyMaterial(type: Copy) { from project(':x-pack:plugin:core').files('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem', - 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt', - 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') + 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt', + 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') into outputDir - } +} - for (Version version : bwcVersions.indexCompatible) { +for (Version version : bwcVersions.indexCompatible) { String baseName = "v${version}" Task oldClusterTest = tasks.create(name: "${baseName}#oldClusterTest", type: RestIntegTestTask) { - mustRunAfter(precommit) + mustRunAfter(precommit) } Object extension = extensions.findByName("${baseName}#oldClusterTestCluster") configure(extensions.findByName("${baseName}#oldClusterTestCluster")) { - dependsOn copyTestNodeKeyMaterial - if (version.before('6.3.0')) { - String depVersion = version; - if (project.bwcVersions.unreleased.contains(version)) { - depVersion += "-SNAPSHOT" - } - mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}" - - } - bwcVersion = version - numBwcNodes = 2 - numNodes = 2 - clusterName = 'full-cluster-restart' - String usersCli = version.before('6.3.0') ? 
'bin/x-pack/users' : 'bin/elasticsearch-users' - setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' - waitCondition = waitWithAuth - - // some tests rely on the translog not being flushed - setting 'indices.memory.shard_inactive_time', '20m' - - setting 'xpack.security.enabled', 'true' - setting 'xpack.security.transport.ssl.enabled', 'true' - if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' - } - setting 'xpack.license.self_generated.type', 'trial' - dependsOn copyTestNodeKeyMaterial - extraConfigFile 'testnode.pem', new File(outputDir + '/testnode.pem') - extraConfigFile 'testnode.crt', new File(outputDir + '/testnode.crt') - extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') - if (withSystemKey) { - if (version.onOrAfter('5.1.0') && version.before('6.0.0')) { - // The setting didn't exist until 5.1.0 - setting 'xpack.security.system_key.required', 'true' + dependsOn copyTestNodeKeyMaterial + if (version.before('6.3.0')) { + String depVersion = version; + if (project.bwcVersions.unreleased.contains(version)) { + depVersion += "-SNAPSHOT" + } + mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}" + } - if (version.onOrAfter('6.0.0')) { - keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key" + bwcVersion = version + numBwcNodes = 2 + numNodes = 2 + clusterName = 'full-cluster-restart' + String usersCli = version.before('6.3.0') ? 
'bin/x-pack/users' : 'bin/elasticsearch-users' + setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' + waitCondition = waitWithAuth + + // some tests rely on the translog not being flushed + setting 'indices.memory.shard_inactive_time', '20m' + + setting 'xpack.security.enabled', 'true' + setting 'xpack.security.transport.ssl.enabled', 'true' + if (project.inFipsJvm) { + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' } else { - extraConfigFile 'x-pack/system_key', "${mainProject.projectDir}/src/test/resources/system_key" + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' } + setting 'xpack.license.self_generated.type', 'trial' + dependsOn copyTestNodeKeyMaterial + extraConfigFile 'testnode.pem', new File(outputDir + '/testnode.pem') + extraConfigFile 'testnode.crt', new File(outputDir + '/testnode.crt') + extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') + + keystoreFile 'xpack.watcher.encryption_key', "${project.projectDir}/src/test/resources/system_key" setting 'xpack.watcher.encrypt_sensitive_data', 'true' - } } Task oldClusterTestRunner = tasks.getByName("${baseName}#oldClusterTestRunner") oldClusterTestRunner.configure { - systemProperty 'tests.is_old_cluster', 'true' - systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT") - systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") - exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class' - exclude 'org/elasticsearch/upgrades/FullClusterRestartSettingsUpgradeIT.class' - exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class' + systemProperty 'tests.is_old_cluster', 'true' + systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT") + systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") + exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class' + exclude 'org/elasticsearch/upgrades/FullClusterRestartSettingsUpgradeIT.class' + exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class' } Task upgradedClusterTest = tasks.create(name: "${baseName}#upgradedClusterTest", type: RestIntegTestTask) configure(extensions.findByName("${baseName}#upgradedClusterTestCluster")) { - dependsOn oldClusterTestRunner, - "${baseName}#oldClusterTestCluster#node0.stop", - "${baseName}#oldClusterTestCluster#node1.stop" - numNodes = 2 - clusterName = 'full-cluster-restart' - dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir } - cleanShared = false // We want to keep snapshots made by the old cluster! 
- setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' - waitCondition = waitWithAuth - - // some tests rely on the translog not being flushed - setting 'indices.memory.shard_inactive_time', '20m' - setting 'xpack.security.enabled', 'true' - if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' - } - setting 'xpack.license.self_generated.type', 'trial' - dependsOn copyTestNodeKeyMaterial - extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') - extraConfigFile 'testnode.pem', new File(outputDir + '/testnode.pem') - extraConfigFile 'testnode.crt', new File(outputDir + '/testnode.crt') - if (withSystemKey) { - setting 'xpack.watcher.encrypt_sensitive_data', 'true' - keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key" - } + dependsOn oldClusterTestRunner, + "${baseName}#oldClusterTestCluster#node0.stop", + "${baseName}#oldClusterTestCluster#node1.stop" + numNodes = 2 + clusterName = 'full-cluster-restart' + dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir } + cleanShared = false // We want to keep snapshots made by the old cluster! + setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' + waitCondition = waitWithAuth + + // some tests rely on the translog not being flushed + setting 'indices.memory.shard_inactive_time', '20m' + setting 'xpack.security.enabled', 'true' + if (project.inFipsJvm) { + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' + } else { + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + } + setting 'xpack.license.self_generated.type', 'trial' + dependsOn copyTestNodeKeyMaterial + extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') + extraConfigFile 'testnode.pem', new File(outputDir + '/testnode.pem') + extraConfigFile 'testnode.crt', new File(outputDir + '/testnode.crt') + + setting 'xpack.watcher.encrypt_sensitive_data', 'true' + keystoreFile 'xpack.watcher.encryption_key', "${project.projectDir}/src/test/resources/system_key" } Task upgradedClusterTestRunner = tasks.getByName("${baseName}#upgradedClusterTestRunner") upgradedClusterTestRunner.configure { - systemProperty 'tests.is_old_cluster', 'false' - systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT") - systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") - exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class' - exclude 'org/elasticsearch/upgrades/FullClusterRestartSettingsUpgradeIT.class' - exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class' + systemProperty 'tests.is_old_cluster', 'false' + systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT") + systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") + exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class' + exclude 
'org/elasticsearch/upgrades/FullClusterRestartSettingsUpgradeIT.class' + exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class' } Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") { - dependsOn = [upgradedClusterTest] + dependsOn = [upgradedClusterTest] } if (project.bwc_tests_enabled) { - bwcTest.dependsOn(versionBwcTest) + bwcTest.dependsOn(versionBwcTest) } - } - - unitTest.enabled = false // no unit tests for full cluster restarts, only the rest integration test +} - // basic integ tests includes testing bwc against the most recent version - task bwcTestSnapshots { +// basic integ tests includes testing bwc against the most recent version +task bwcTestSnapshots { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.unreleasedIndexCompatible) { - dependsOn "v${version}#bwcTest" - } + for (final def version : bwcVersions.unreleasedIndexCompatible) { + dependsOn "v${version}#bwcTest" + } } - } - - check.dependsOn(bwcTestSnapshots) +} - dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here - testCompile project(path: xpackModule('core'), configuration: 'default') - testCompile project(path: xpackModule('watcher'), configuration: 'runtime') - testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') - testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') - } +check.dependsOn(bwcTestSnapshots) - // copy x-pack plugin info so it is on the classpath and security manager has the right permissions - task copyXPackRestSpec(type: Copy) { +// copy x-pack plugin info so it is on the classpath and security manager has the right permissions +task copyXPackRestSpec(type: Copy) { dependsOn(project.configurations.restSpec, 'processTestResources') from project(xpackModule('core')).sourceSets.test.resources include 'rest-api-spec/api/**' into project.sourceSets.test.output.resourcesDir - } +} - task copyXPackPluginProps(type: Copy) { +task copyXPackPluginProps(type: Copy) { dependsOn(copyXPackRestSpec) from project(xpackModule('core')).file('src/main/plugin-metadata') from project(xpackModule('core')).tasks.pluginProperties into outputDir - } - project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) - - repositories { - maven { - url "https://artifacts.elastic.co/maven" - } - maven { - url "https://snapshots.elastic.co/maven" - } - } } +project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) diff --git a/x-pack/qa/full-cluster-restart/with-system-key/build.gradle b/x-pack/qa/full-cluster-restart/with-system-key/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/qa/full-cluster-restart/without-system-key/build.gradle b/x-pack/qa/full-cluster-restart/without-system-key/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index f8222669b218e..f689573a61437 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -3,10 +3,10 @@ import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.Version import java.nio.charset.StandardCharsets -import java.util.regex.Matcher // Apply the java plugin to this project so the sources can be edited in an IDE -apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.standalone-test' + unitTest.enabled = false dependencies { @@ -68,161 
+68,50 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> return tmpFile.exists() } -Project mainProject = project - compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" forbiddenPatterns { exclude '**/system_key' } -// Tests are pushed down to subprojects -testingConventions.enabled = false - -/** - * Subdirectories of this project are test rolling upgrades with various - * configuration options based on their name. - */ -subprojects { - Matcher m = project.name =~ /with(out)?-system-key/ - if (false == m.matches()) { - throw new InvalidUserDataException("Invalid project name [${project.name}]") - } - boolean withSystemKey = m.group(1) == null - - apply plugin: 'elasticsearch.standalone-test' - - // Use resources from the rolling-upgrade project in subdirectories - sourceSets { - test { - java { - srcDirs = ["${mainProject.projectDir}/src/test/java"] - } - resources { - srcDirs = ["${mainProject.projectDir}/src/test/resources"] - } - } - } - - forbiddenPatterns { - exclude '**/system_key' - } +String outputDir = "${buildDir}/generated-resources/${project.name}" - String outputDir = "${buildDir}/generated-resources/${project.name}" - - // This is a top level task which we will add dependencies to below. - // It is a single task that can be used to backcompat tests against all versions. - task bwcTest { +// This is a top level task which we will add dependencies to below. +// It is a single task that can be used to backcompat tests against all versions. +task bwcTest { description = 'Runs backwards compatibility tests.' group = 'verification' - } +} - String output = "${buildDir}/generated-resources/${project.name}" - task copyTestNodeKeyMaterial(type: Copy) { +task copyTestNodeKeyMaterial(type: Copy) { from project(':x-pack:plugin:core').files('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem', - 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt', - 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') + 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt', + 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') into outputDir - } +} - for (Version version : bwcVersions.wireCompatible) { +for (Version version : bwcVersions.wireCompatible) { String baseName = "v${version}" Task oldClusterTest = tasks.create(name: "${baseName}#oldClusterTest", type: RestIntegTestTask) { - mustRunAfter(precommit) + mustRunAfter(precommit) } configure(extensions.findByName("${baseName}#oldClusterTestCluster")) { - dependsOn copyTestNodeKeyMaterial - if (version.before('6.3.0')) { - String depVersion = version; - if (project.bwcVersions.unreleased.contains(version)) { - depVersion += "-SNAPSHOT" - } - mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}" - } - String usersCli = version.before('6.3.0') ? 
'bin/x-pack/users' : 'bin/elasticsearch-users' - setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' - bwcVersion = version - numBwcNodes = 3 - numNodes = 3 - clusterName = 'rolling-upgrade' - waitCondition = waitWithAuth - setting 'xpack.monitoring.exporters._http.type', 'http' - setting 'xpack.monitoring.exporters._http.enabled', 'false' - setting 'xpack.monitoring.exporters._http.auth.username', 'test_user' - setting 'xpack.monitoring.exporters._http.auth.password', 'x-pack-test-password' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - setting 'xpack.security.transport.ssl.enabled', 'true' - setting 'xpack.security.authc.token.enabled', 'true' - setting 'xpack.security.audit.enabled', 'true' - if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' - } - dependsOn copyTestNodeKeyMaterial - extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') - extraConfigFile 'testnode.pem', new File(outputDir + '/testnode.pem') - extraConfigFile 'testnode.crt', new File(outputDir + '/testnode.crt') - if (version.onOrAfter('7.0.0')) { - setting 'xpack.security.authc.realms.file.file1.order', '0' - setting 'xpack.security.authc.realms.native.native1.order', '1' - } else { - setting 'xpack.security.authc.realms.file1.type', 'file' - setting 'xpack.security.authc.realms.file1.order', '0' - setting 'xpack.security.authc.realms.native1.type', 'native' - setting 'xpack.security.authc.realms.native1.order', '1' - } - - if (withSystemKey) { - if (version.onOrAfter('5.1.0') && version.before('6.0.0')) { - // The setting didn't exist until 5.1.0 - setting 'xpack.security.system_key.required', 'true' - } - if (version.onOrAfter('6.0.0')) { - keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key" - } else { - String systemKeyFile = version.before('6.3.0') ? 'x-pack/system_key' : 'system_key' - extraConfigFile systemKeyFile, "${mainProject.projectDir}/src/test/resources/system_key" - keystoreSetting 'xpack.security.authc.token.passphrase', 'token passphrase' + dependsOn copyTestNodeKeyMaterial + if (version.before('6.3.0')) { + String depVersion = version; + if (project.bwcVersions.unreleased.contains(version)) { + depVersion += "-SNAPSHOT" + } + mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}" } - setting 'xpack.watcher.encrypt_sensitive_data', 'true' - } - - if (version.onOrAfter('6.6.0')) { - setting 'ccr.auto_follow.wait_for_metadata_timeout', '1s' - } - - // Old versions of the code contain an invalid assertion that trips - // during tests. Versions 5.6.9 and 6.2.4 have been fixed by removing - // the assertion, but this is impossible for released versions. - // However, released versions run without assertions, so end users won't - // be suffering the effects. This argument effectively removes the - // incorrect assertion from the older versions used in the BWC tests. 
- if (version.before('5.6.9') || (version.onOrAfter('6.0.0') && version.before('6.2.4'))) { - jvmArgs '-da:org.elasticsearch.xpack.monitoring.exporter.http.HttpExportBulk' - } - } - - Task oldClusterTestRunner = tasks.getByName("${baseName}#oldClusterTestRunner") - oldClusterTestRunner.configure { - systemProperty 'tests.rest.suite', 'old_cluster' - } - - Closure configureUpgradeCluster = {String name, Task lastRunner, int stopNode, Closure getOtherUnicastHostAddresses -> - configure(extensions.findByName("${baseName}#${name}")) { - dependsOn lastRunner, "${baseName}#oldClusterTestCluster#node${stopNode}.stop" - setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' + String usersCli = version.before('6.3.0') ? 'bin/x-pack/users' : 'bin/elasticsearch-users' + setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' + bwcVersion = version + numBwcNodes = 3 + numNodes = 3 clusterName = 'rolling-upgrade' - otherUnicastHostAddresses = { getOtherUnicastHostAddresses() } - /* Override the data directory so the new node always gets the node we - * just stopped's data directory. */ - dataDir = { nodeNumber -> oldClusterTest.nodes[stopNode].dataDir } waitCondition = waitWithAuth setting 'xpack.monitoring.exporters._http.type', 'http' setting 'xpack.monitoring.exporters._http.enabled', 'false' @@ -231,154 +120,195 @@ subprojects { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' setting 'xpack.security.transport.ssl.enabled', 'true' + setting 'xpack.security.authc.token.enabled', 'true' + setting 'xpack.security.audit.enabled', 'true' if (project.inFipsJvm) { - setting 'xpack.security.transport.ssl.key', 'testnode.pem' - setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' - keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' } else { - setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' - setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' } - setting 'node.attr.upgraded', 'true' - setting 'xpack.security.authc.token.enabled', 'true' - setting 'xpack.security.audit.enabled', 'true' - setting 'node.name', "upgraded-node-${stopNode}" dependsOn copyTestNodeKeyMaterial extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') extraConfigFile 'testnode.pem', new File(outputDir + '/testnode.pem') extraConfigFile 'testnode.crt', new File(outputDir + '/testnode.crt') if (version.onOrAfter('7.0.0')) { - setting 'xpack.security.authc.realms.file.file1.order', '0' - setting 'xpack.security.authc.realms.native.native1.order', '1' + setting 'xpack.security.authc.realms.file.file1.order', '0' + setting 'xpack.security.authc.realms.native.native1.order', '1' } else { - setting 'xpack.security.authc.realms.file1.type', 'file' - setting 'xpack.security.authc.realms.file1.order', '0' - setting 'xpack.security.authc.realms.native1.type', 'native' - setting 'xpack.security.authc.realms.native1.order', '1' + setting 'xpack.security.authc.realms.file1.type', 'file' + setting 
'xpack.security.authc.realms.file1.order', '0' + setting 'xpack.security.authc.realms.native1.type', 'native' + setting 'xpack.security.authc.realms.native1.order', '1' + } + + keystoreFile 'xpack.watcher.encryption_key', "${project.projectDir}/src/test/resources/system_key" + setting 'xpack.watcher.encrypt_sensitive_data', 'true' + + if (version.onOrAfter('6.6.0')) { + setting 'ccr.auto_follow.wait_for_metadata_timeout', '1s' } - if (withSystemKey) { - setting 'xpack.watcher.encrypt_sensitive_data', 'true' - keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key" + + // Old versions of the code contain an invalid assertion that trips + // during tests. Versions 5.6.9 and 6.2.4 have been fixed by removing + // the assertion, but this is impossible for released versions. + // However, released versions run without assertions, so end users won't + // be suffering the effects. This argument effectively removes the + // incorrect assertion from the older versions used in the BWC tests. + if (version.before('5.6.9') || (version.onOrAfter('6.0.0') && version.before('6.2.4'))) { + jvmArgs '-da:org.elasticsearch.xpack.monitoring.exporter.http.HttpExportBulk' } - if (version.before('6.0.0')) { - keystoreSetting 'xpack.security.authc.token.passphrase', 'token passphrase' + } + + Task oldClusterTestRunner = tasks.getByName("${baseName}#oldClusterTestRunner") + oldClusterTestRunner.configure { + systemProperty 'tests.rest.suite', 'old_cluster' + } + + Closure configureUpgradeCluster = {String name, Task lastRunner, int stopNode, Closure getOtherUnicastHostAddresses -> + configure(extensions.findByName("${baseName}#${name}")) { + dependsOn lastRunner, "${baseName}#oldClusterTestCluster#node${stopNode}.stop" + setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' + clusterName = 'rolling-upgrade' + otherUnicastHostAddresses = { getOtherUnicastHostAddresses() } + /* Override the data directory so the new node always gets the node we + * just stopped's data directory. 
*/ + dataDir = { nodeNumber -> oldClusterTest.nodes[stopNode].dataDir } + waitCondition = waitWithAuth + setting 'xpack.monitoring.exporters._http.type', 'http' + setting 'xpack.monitoring.exporters._http.enabled', 'false' + setting 'xpack.monitoring.exporters._http.auth.username', 'test_user' + setting 'xpack.monitoring.exporters._http.auth.password', 'x-pack-test-password' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + setting 'xpack.security.transport.ssl.enabled', 'true' + if (project.inFipsJvm) { + setting 'xpack.security.transport.ssl.key', 'testnode.pem' + setting 'xpack.security.transport.ssl.certificate', 'testnode.crt' + keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' + } else { + setting 'xpack.security.transport.ssl.keystore.path', 'testnode.jks' + setting 'xpack.security.transport.ssl.keystore.password', 'testnode' + } + setting 'node.attr.upgraded', 'true' + setting 'xpack.security.authc.token.enabled', 'true' + setting 'xpack.security.audit.enabled', 'true' + setting 'node.name', "upgraded-node-${stopNode}" + dependsOn copyTestNodeKeyMaterial + extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') + extraConfigFile 'testnode.pem', new File(outputDir + '/testnode.pem') + extraConfigFile 'testnode.crt', new File(outputDir + '/testnode.crt') + if (version.onOrAfter('7.0.0')) { + setting 'xpack.security.authc.realms.file.file1.order', '0' + setting 'xpack.security.authc.realms.native.native1.order', '1' + } else { + setting 'xpack.security.authc.realms.file1.type', 'file' + setting 'xpack.security.authc.realms.file1.order', '0' + setting 'xpack.security.authc.realms.native1.type', 'native' + setting 'xpack.security.authc.realms.native1.order', '1' + } + setting 'xpack.watcher.encrypt_sensitive_data', 'true' + keystoreFile 'xpack.watcher.encryption_key', "${project.projectDir}/src/test/resources/system_key" + if (version.before('6.0.0')) { + keystoreSetting 'xpack.security.authc.token.passphrase', 'token passphrase' + } } - } } Task oneThirdUpgradedTest = tasks.create(name: "${baseName}#oneThirdUpgradedTest", type: RestIntegTestTask) configureUpgradeCluster("oneThirdUpgradedTestCluster", oldClusterTestRunner, 0, - // Use all running nodes as seed nodes so there is no race between pinging and the tests - { [oldClusterTest.nodes.get(1).transportUri(), oldClusterTest.nodes.get(2).transportUri()] }) + // Use all running nodes as seed nodes so there is no race between pinging and the tests + { [oldClusterTest.nodes.get(1).transportUri(), oldClusterTest.nodes.get(2).transportUri()] }) Task oneThirdUpgradedTestRunner = tasks.getByName("${baseName}#oneThirdUpgradedTestRunner") oneThirdUpgradedTestRunner.configure { - systemProperty 'tests.rest.suite', 'mixed_cluster' - systemProperty 'tests.first_round', 'true' - // We only need to run these tests once so we may as well do it when we're two thirds upgraded - systemProperty 'tests.rest.blacklist', [ - 'mixed_cluster/10_basic/Start scroll in mixed cluster on upgraded node that we will continue after upgrade', - 'mixed_cluster/30_ml_jobs_crud/Create a job in the mixed cluster and write some data', - 'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed in mixed cluster', + systemProperty 'tests.rest.suite', 'mixed_cluster' + systemProperty 'tests.first_round', 'true' + // We only need to run these tests once so we may as well do it when we're two thirds upgraded + systemProperty 'tests.rest.blacklist', [ + 'mixed_cluster/10_basic/Start 
scroll in mixed cluster on upgraded node that we will continue after upgrade', + 'mixed_cluster/30_ml_jobs_crud/Create a job in the mixed cluster and write some data', + 'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed in mixed cluster', ].join(',') - finalizedBy "${baseName}#oldClusterTestCluster#node1.stop" + finalizedBy "${baseName}#oldClusterTestCluster#node1.stop" } Task twoThirdsUpgradedTest = tasks.create(name: "${baseName}#twoThirdsUpgradedTest", type: RestIntegTestTask) configureUpgradeCluster("twoThirdsUpgradedTestCluster", oneThirdUpgradedTestRunner, 1, - // Use all running nodes as seed nodes so there is no race between pinging and the tests - { [oldClusterTest.nodes.get(2).transportUri(), oneThirdUpgradedTest.nodes.get(0).transportUri()] }) + // Use all running nodes as seed nodes so there is no race between pinging and the tests + { [oldClusterTest.nodes.get(2).transportUri(), oneThirdUpgradedTest.nodes.get(0).transportUri()] }) Task twoThirdsUpgradedTestRunner = tasks.getByName("${baseName}#twoThirdsUpgradedTestRunner") twoThirdsUpgradedTestRunner.configure { - systemProperty 'tests.rest.suite', 'mixed_cluster' - systemProperty 'tests.first_round', 'false' - finalizedBy "${baseName}#oldClusterTestCluster#node2.stop" + systemProperty 'tests.rest.suite', 'mixed_cluster' + systemProperty 'tests.first_round', 'false' + finalizedBy "${baseName}#oldClusterTestCluster#node2.stop" } Task upgradedClusterTest = tasks.create(name: "${baseName}#upgradedClusterTest", type: RestIntegTestTask) configureUpgradeCluster("upgradedClusterTestCluster", twoThirdsUpgradedTestRunner, 2, - // Use all running nodes as seed nodes so there is no race between pinging and the tests - { [oneThirdUpgradedTest.nodes.get(0).transportUri(), twoThirdsUpgradedTest.nodes.get(0).transportUri()] }) + // Use all running nodes as seed nodes so there is no race between pinging and the tests + { [oneThirdUpgradedTest.nodes.get(0).transportUri(), twoThirdsUpgradedTest.nodes.get(0).transportUri()] }) Task upgradedClusterTestRunner = tasks.getByName("${baseName}#upgradedClusterTestRunner") upgradedClusterTestRunner.configure { - systemProperty 'tests.rest.suite', 'upgraded_cluster' - /* - * Force stopping all the upgraded nodes after the test runner - * so they are alive during the test. - */ - finalizedBy "${baseName}#oneThirdUpgradedTestCluster#stop" - finalizedBy "${baseName}#twoThirdsUpgradedTestCluster#stop" - - // migration tests should only run when the original/old cluster nodes where versions < 5.2.0. - // this stinks but we do the check here since our rest tests do not support conditionals - // otherwise we could check the index created version - String versionStr = project.extensions.findByName("${baseName}#oldClusterTestCluster").properties.get('bwcVersion') - String[] versionParts = versionStr.split('\\.') - if (versionParts[0].equals("5")) { - Integer minor = Integer.parseInt(versionParts[1]) - if (minor >= 2) { - systemProperty 'tests.rest.blacklist', '/20_security/Verify default password migration results in upgraded cluster' - } - } + systemProperty 'tests.rest.suite', 'upgraded_cluster' + /* + * Force stopping all the upgraded nodes after the test runner + * so they are alive during the test. + */ + finalizedBy "${baseName}#oneThirdUpgradedTestCluster#stop" + finalizedBy "${baseName}#twoThirdsUpgradedTestCluster#stop" + + // migration tests should only run when the original/old cluster nodes where versions < 5.2.0. 
+ // this stinks but we do the check here since our rest tests do not support conditionals + // otherwise we could check the index created version + String versionStr = project.extensions.findByName("${baseName}#oldClusterTestCluster").properties.get('bwcVersion') + String[] versionParts = versionStr.split('\\.') + if (versionParts[0].equals("5")) { + Integer minor = Integer.parseInt(versionParts[1]) + if (minor >= 2) { + systemProperty 'tests.rest.blacklist', '/20_security/Verify default password migration results in upgraded cluster' + } + } } Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") { - dependsOn = [upgradedClusterTest] + dependsOn = [upgradedClusterTest] } if (project.bwc_tests_enabled) { - bwcTest.dependsOn(versionBwcTest) + bwcTest.dependsOn(versionBwcTest) } - } - - unitTest.enabled = false // no unit tests for rolling upgrades, only the rest integration test +} - // basic integ tests include testing bwc against the most recent version - task bwcTestSnapshots { +// basic integ tests include testing bwc against the most recent version +task bwcTestSnapshots { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.unreleasedWireCompatible) { - dependsOn "v${version}#bwcTest" - } + for (final def version : bwcVersions.unreleasedWireCompatible) { + dependsOn "v${version}#bwcTest" + } } - } - check.dependsOn(bwcTestSnapshots) - - dependencies { - // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here - testCompile project(path: xpackModule('core'), configuration: 'default') - testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') - testCompile project(path: xpackModule('watcher')) - } - - compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" +} +check.dependsOn(bwcTestSnapshots) - // copy x-pack plugin info so it is on the classpath and security manager has the right permissions - task copyXPackRestSpec(type: Copy) { +// copy x-pack plugin info so it is on the classpath and security manager has the right permissions +task copyXPackRestSpec(type: Copy) { dependsOn(project.configurations.restSpec, 'processTestResources') from project(xpackProject('plugin').path).sourceSets.test.resources include 'rest-api-spec/api/**' into project.sourceSets.test.output.resourcesDir - } +} - task copyXPackPluginProps(type: Copy) { +task copyXPackPluginProps(type: Copy) { dependsOn(copyXPackRestSpec) from project(xpackModule('core')).file('src/main/plugin-metadata') from project(xpackModule('core')).tasks.pluginProperties into outputDir - } - project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) - - repositories { - maven { - url "https://artifacts.elastic.co/maven" - } - maven { - url "https://snapshots.elastic.co/maven" - } - } } +project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) diff --git a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle deleted file mode 100644 index 03505e01dedd8..0000000000000 --- a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle +++ /dev/null @@ -1 +0,0 @@ -group = "${group}.x-pack.qa.rolling-upgrade.with-system-key" diff --git a/x-pack/qa/rolling-upgrade/without-system-key/build.gradle b/x-pack/qa/rolling-upgrade/without-system-key/build.gradle deleted file mode 100644 index aa7ac502eb3e6..0000000000000 --- a/x-pack/qa/rolling-upgrade/without-system-key/build.gradle +++ /dev/null @@ -1 +0,0 @@ 
-group = "${group}.x-pack.qa.rolling-upgrade.without-system-key" From a58a829edef5ad3d1920f2b9f810ecf8fec89f41 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 28 Mar 2019 14:39:34 -0400 Subject: [PATCH 09/63] [DOCS] Document common settings for snapshot repository plugins (#40475) --- docs/plugins/repository-azure.asciidoc | 4 +--- docs/plugins/repository-gcs.asciidoc | 2 ++ docs/plugins/repository-hdfs.asciidoc | 2 ++ docs/plugins/repository-s3.asciidoc | 2 ++ docs/plugins/repository-shared-settings.asciidoc | 11 +++++++++++ 5 files changed, 18 insertions(+), 3 deletions(-) create mode 100644 docs/plugins/repository-shared-settings.asciidoc diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index 9249efd5d1744..61dcadd6e10d6 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -126,9 +126,7 @@ The Azure repository supports following settings: setting doesn't affect index files that are already compressed by default. Defaults to `true`. -`readonly`:: - - Makes repository read-only. Defaults to `false`. +include::repository-shared-settings.asciidoc[] `location_mode`:: diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index f655d29307074..b34c9456a9306 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -240,6 +240,8 @@ The following settings are supported: setting doesn't affect index files that are already compressed by default. Defaults to `true`. +include::repository-shared-settings.asciidoc[] + `application_name`:: deprecated[7.0.0, This setting is now defined in the <>] diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index e798682a38699..bedb0e7e1ef87 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -64,6 +64,8 @@ The following settings are supported: Whether to compress the metadata or not. (Enabled by default) +include::repository-shared-settings.asciidoc[] + `chunk_size`:: Override the chunk size. (Disabled by default) diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 7c4e763a3b04a..084d67f236472 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -213,6 +213,8 @@ The following settings are supported: setting doesn't affect index files that are already compressed by default. Defaults to `true`. +include::repository-shared-settings.asciidoc[] + `server_side_encryption`:: When set to `true` files are encrypted on server side using AES256 diff --git a/docs/plugins/repository-shared-settings.asciidoc b/docs/plugins/repository-shared-settings.asciidoc new file mode 100644 index 0000000000000..ca9345e0ffc2c --- /dev/null +++ b/docs/plugins/repository-shared-settings.asciidoc @@ -0,0 +1,11 @@ +`max_restore_bytes_per_sec`:: + + Throttles per node restore rate. Defaults to `40mb` per second. + +`max_snapshot_bytes_per_sec`:: + + Throttles per node snapshot rate. Defaults to `40mb` per second. + +`readonly`:: + + Makes repository read-only. Defaults to `false`. 
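+
+For example, a repository could be registered with custom throttle rates as
+follows (the `my_backup` repository name and the `fs` repository type below
+are purely illustrative):
+
+[source,js]
+--------------------------------------------------
+PUT _snapshot/my_backup
+{
+  "type": "fs",
+  "settings": {
+    "location": "my_backup_location",
+    "max_snapshot_bytes_per_sec": "20mb",
+    "max_restore_bytes_per_sec": "20mb"
+  }
+}
+--------------------------------------------------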
\ No newline at end of file From dd8b4bbc34a11c92a622b742b00cf1c34235e8e9 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 28 Mar 2019 15:05:36 -0400 Subject: [PATCH 10/63] [DOCS] Correct keystore commands for Email and Jira actions in Watcher (#40417) --- x-pack/docs/en/watcher/actions/email.asciidoc | 6 +++--- x-pack/docs/en/watcher/actions/jira.asciidoc | 11 +++++++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/x-pack/docs/en/watcher/actions/email.asciidoc index 61b0fa8cf9e7f..1f8de38319100 100644 --- a/x-pack/docs/en/watcher/actions/email.asciidoc +++ b/x-pack/docs/en/watcher/actions/email.asciidoc @@ -325,7 +325,7 @@ In order to store the account SMTP password, use the keystore command [source,yaml] -------------------------------------------------- -bin/elasticsearch-keystore xpack.notification.email.account.gmail_account.smtp.secure_password +bin/elasticsearch-keystore add xpack.notification.email.account.gmail_account.smtp.secure_password -------------------------------------------------- If you get an authentication error that indicates that you need to continue the @@ -363,7 +363,7 @@ In order to store the account SMTP password, use the keystore command [source,yaml] -------------------------------------------------- -bin/elasticsearch-keystore xpack.notification.email.account.outlook_account.smtp.secure_password +bin/elasticsearch-keystore add xpack.notification.email.account.outlook_account.smtp.secure_password -------------------------------------------------- @@ -400,7 +400,7 @@ In order to store the account SMTP password, use the keystore command [source,yaml] -------------------------------------------------- -bin/elasticsearch-keystore xpack.notification.email.account.ses_account.smtp.secure_password +bin/elasticsearch-keystore add xpack.notification.email.account.ses_account.smtp.secure_password -------------------------------------------------- NOTE: You need to use your Amazon SES SMTP credentials to send email through diff --git a/x-pack/docs/en/watcher/actions/jira.asciidoc b/x-pack/docs/en/watcher/actions/jira.asciidoc index f0b9c714181b8..4608ee6ab1af5 100644 --- a/x-pack/docs/en/watcher/actions/jira.asciidoc +++ b/x-pack/docs/en/watcher/actions/jira.asciidoc @@ -109,12 +109,15 @@ Jira account you need to specify (see {ref}/secure-settings.html[secure settings [source,yaml] -------------------------------------------------- -bin/elasticsearch-keystore xpack.notification.jira.account.monitoring.secure_url -bin/elasticsearch-keystore xpack.notification.jira.account.monitoring.secure_user -bin/elasticsearch-keystore xpack.notification.jira.account.monitoring.secure_password +bin/elasticsearch-keystore add xpack.notification.jira.account.monitoring.secure_url +bin/elasticsearch-keystore add xpack.notification.jira.account.monitoring.secure_user +bin/elasticsearch-keystore add xpack.notification.jira.account.monitoring.secure_password -------------------------------------------------- -deprecated[The insecure way of storing sensitive data (`url`, `user` and `password`) in the configuration file or the cluster settings is deprecated] +[WARNING] +====== +Storing sensitive data (`url`, `user` and `password`) in the configuration file or the cluster settings is insecure and has been deprecated. Please use {es}'s secure {ref}/secure-settings.html[keystore] method instead. 
+====== To avoid credentials that transit in clear text over the network, {watcher} will reject `url` settings like `http://internal-jira.elastic.co` that are based on From 8be33d1ec773623785b75720e5de6679820a9a5e Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 28 Mar 2019 16:22:59 -0400 Subject: [PATCH 11/63] Add start and stop time to cat recovery API (#40378) The cat recovery API is incredibly useful. Yet it is missing the start and stop times as options in the output. This commit adds these as options to the cat recovery API. We elect to make these not visible by default to avoid breaking the output that users might rely on. --- .../elasticsearch/action/ActionModule.java | 3 +- ...Action.java => RestCatRecoveryAction.java} | 13 +- .../action/cat/RestRecoveryActionTests.java | 132 ++++++++++-------- 3 files changed, 90 insertions(+), 58 deletions(-) rename server/src/main/java/org/elasticsearch/rest/action/cat/{RestRecoveryAction.java => RestCatRecoveryAction.java} (89%) diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 83e1e01614435..2cfe66372115f 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -289,6 +289,7 @@ import org.elasticsearch.rest.action.cat.RestAliasAction; import org.elasticsearch.rest.action.cat.RestAllocationAction; import org.elasticsearch.rest.action.cat.RestCatAction; +import org.elasticsearch.rest.action.cat.RestCatRecoveryAction; import org.elasticsearch.rest.action.cat.RestFielddataAction; import org.elasticsearch.rest.action.cat.RestHealthAction; import org.elasticsearch.rest.action.cat.RestIndicesAction; @@ -665,7 +666,7 @@ public void initRestHandlers(Supplier nodesInCluster) { // Fully qualified to prevent interference with rest.action.count.RestCountAction registerHandler.accept(new org.elasticsearch.rest.action.cat.RestCountAction(settings, restController)); // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction - registerHandler.accept(new org.elasticsearch.rest.action.cat.RestRecoveryAction(settings, restController)); + registerHandler.accept(new RestCatRecoveryAction(settings, restController)); registerHandler.accept(new RestHealthAction(settings, restController)); registerHandler.accept(new org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction(settings, restController)); registerHandler.accept(new RestAliasAction(settings, restController)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatRecoveryAction.java similarity index 89% rename from server/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java rename to server/src/main/java/org/elasticsearch/rest/action/cat/RestCatRecoveryAction.java index a66741c2d9411..0cea93e4e7ee7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatRecoveryAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentElasticsearchExtension; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -47,8 
@@ * in a string format, designed to be used at the command line. An Index can * be specified to limit output to a particular index or indices. */ -public class RestRecoveryAction extends AbstractCatAction { - public RestRecoveryAction(Settings settings, RestController restController) { +public class RestCatRecoveryAction extends AbstractCatAction { + public RestCatRecoveryAction(Settings settings, RestController restController) { super(settings); restController.registerHandler(GET, "/_cat/recovery", this); restController.registerHandler(GET, "/_cat/recovery/{index}", this); @@ -86,6 +87,10 @@ protected Table getTableWithHeader(RestRequest request) { t.startHeaders() .addCell("index", "alias:i,idx;desc:index name") .addCell("shard", "alias:s,sh;desc:shard name") + .addCell("start_time", "default:false;alias:start;desc:recovery start time") + .addCell("start_time_millis", "default:false;alias:start_millis;desc:recovery start time in epoch milliseconds") + .addCell("stop_time", "default:false;alias:stop;desc:recovery stop time") + .addCell("stop_time_millis", "default:false;alias:stop_millis;desc:recovery stop time in epoch milliseconds") .addCell("time", "alias:t,ti;desc:recovery time") .addCell("type", "alias:ty;desc:recovery type") .addCell("stage", "alias:st;desc:recovery stage") @@ -149,6 +154,10 @@ public int compare(RecoveryState o1, RecoveryState o2) { t.startRow(); t.addCell(index); t.addCell(state.getShardId().id()); + t.addCell(XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(state.getTimer().startTime())); + t.addCell(state.getTimer().startTime()); + t.addCell(XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(state.getTimer().stopTime())); + t.addCell(state.getTimer().stopTime()); t.addCell(new TimeValue(state.getTimer().time())); t.addCell(state.getRecoverySource().getType().toString().toLowerCase(Locale.ROOT)); t.addCell(state.getStage().toString().toLowerCase(Locale.ROOT)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index 25f04532ac8ce..7bfa50ff2b724 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentElasticsearchExtension; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.recovery.RecoveryState; @@ -37,7 +38,9 @@ import org.elasticsearch.usage.UsageService; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -53,7 +56,7 @@ public void testRestRecoveryAction() { final Settings settings = Settings.EMPTY; UsageService usageService = new UsageService(); final RestController restController = new RestController(Collections.emptySet(), null, null, null, usageService); - final RestRecoveryAction action = new RestRecoveryAction(settings, restController); + final RestCatRecoveryAction action = new RestCatRecoveryAction(settings, restController); final int totalShards = randomIntBetween(1, 32); final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2)); final int failedShards = 
totalShards - successfulShards; @@ -64,7 +67,11 @@ public void testRestRecoveryAction() { final RecoveryState state = mock(RecoveryState.class); when(state.getShardId()).thenReturn(new ShardId(new Index("index", "_na_"), i)); final RecoveryState.Timer timer = mock(RecoveryState.Timer.class); - when(timer.time()).thenReturn((long)randomIntBetween(1000000, 10 * 1000000)); + final long startTime = randomLongBetween(0, new Date().getTime()); + when(timer.startTime()).thenReturn(startTime); + final long time = randomLongBetween(1000000, 10 * 1000000); + when(timer.time()).thenReturn(time); + when(timer.stopTime()).thenReturn(startTime + time); when(state.getTimer()).thenReturn(timer); when(state.getRecoverySource()).thenReturn(TestShardRouting.randomRecoverySource()); when(state.getStage()).thenReturn(randomFrom(RecoveryState.Stage.values())); @@ -122,63 +129,78 @@ public void testRestRecoveryAction() { List headers = table.getHeaders(); - assertThat(headers.get(0).value, equalTo("index")); - assertThat(headers.get(1).value, equalTo("shard")); - assertThat(headers.get(2).value, equalTo("time")); - assertThat(headers.get(3).value, equalTo("type")); - assertThat(headers.get(4).value, equalTo("stage")); - assertThat(headers.get(5).value, equalTo("source_host")); - assertThat(headers.get(6).value, equalTo("source_node")); - assertThat(headers.get(7).value, equalTo("target_host")); - assertThat(headers.get(8).value, equalTo("target_node")); - assertThat(headers.get(9).value, equalTo("repository")); - assertThat(headers.get(10).value, equalTo("snapshot")); - assertThat(headers.get(11).value, equalTo("files")); - assertThat(headers.get(12).value, equalTo("files_recovered")); - assertThat(headers.get(13).value, equalTo("files_percent")); - assertThat(headers.get(14).value, equalTo("files_total")); - assertThat(headers.get(15).value, equalTo("bytes")); - assertThat(headers.get(16).value, equalTo("bytes_recovered")); - assertThat(headers.get(17).value, equalTo("bytes_percent")); - assertThat(headers.get(18).value, equalTo("bytes_total")); - assertThat(headers.get(19).value, equalTo("translog_ops")); - assertThat(headers.get(20).value, equalTo("translog_ops_recovered")); - assertThat(headers.get(21).value, equalTo("translog_ops_percent")); + final List expectedHeaders = Arrays.asList( + "index", + "shard", + "start_time", + "start_time_millis", + "stop_time", + "stop_time_millis", + "time", + "type", + "stage", + "source_host", + "source_node", + "target_host", + "target_node", + "repository", + "snapshot", + "files", + "files_recovered", + "files_percent", + "files_total", + "bytes", + "bytes_recovered", + "bytes_percent", + "bytes_total", + "translog_ops", + "translog_ops_recovered", + "translog_ops_percent"); + + for (int i = 0; i < expectedHeaders.size(); i++) { + assertThat(headers.get(i).value, equalTo(expectedHeaders.get(i))); + } assertThat(table.getRows().size(), equalTo(successfulShards)); + for (int i = 0; i < successfulShards; i++) { final RecoveryState state = recoveryStates.get(i); - List cells = table.getRows().get(i); - assertThat(cells.get(0).value, equalTo("index")); - assertThat(cells.get(1).value, equalTo(i)); - assertThat(cells.get(2).value, equalTo(new TimeValue(state.getTimer().time()))); - assertThat(cells.get(3).value, equalTo(state.getRecoverySource().getType().name().toLowerCase(Locale.ROOT))); - assertThat(cells.get(4).value, equalTo(state.getStage().name().toLowerCase(Locale.ROOT))); - assertThat(cells.get(5).value, equalTo(state.getSourceNode() == null ? 
"n/a" : state.getSourceNode().getHostName())); - assertThat(cells.get(6).value, equalTo(state.getSourceNode() == null ? "n/a" : state.getSourceNode().getName())); - assertThat(cells.get(7).value, equalTo(state.getTargetNode().getHostName())); - assertThat(cells.get(8).value, equalTo(state.getTargetNode().getName())); - assertThat( - cells.get(9).value, - equalTo(state.getRecoverySource() == null || state.getRecoverySource().getType() != RecoverySource.Type.SNAPSHOT ? - "n/a" : - ((SnapshotRecoverySource) state.getRecoverySource()).snapshot().getRepository())); - assertThat( - cells.get(10).value, - equalTo(state.getRecoverySource() == null || state.getRecoverySource().getType() != RecoverySource.Type.SNAPSHOT ? - "n/a" : - ((SnapshotRecoverySource) state.getRecoverySource()).snapshot().getSnapshotId().getName())); - assertThat(cells.get(11).value, equalTo(state.getIndex().totalRecoverFiles())); - assertThat(cells.get(12).value, equalTo(state.getIndex().recoveredFileCount())); - assertThat(cells.get(13).value, equalTo(percent(state.getIndex().recoveredFilesPercent()))); - assertThat(cells.get(14).value, equalTo(state.getIndex().totalFileCount())); - assertThat(cells.get(15).value, equalTo(state.getIndex().totalRecoverBytes())); - assertThat(cells.get(16).value, equalTo(state.getIndex().recoveredBytes())); - assertThat(cells.get(17).value, equalTo(percent(state.getIndex().recoveredBytesPercent()))); - assertThat(cells.get(18).value, equalTo(state.getIndex().totalBytes())); - assertThat(cells.get(19).value, equalTo(state.getTranslog().totalOperations())); - assertThat(cells.get(20).value, equalTo(state.getTranslog().recoveredOperations())); - assertThat(cells.get(21).value, equalTo(percent(state.getTranslog().recoveredPercent()))); + final List expectedValues = Arrays.asList( + "index", + i, + XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(state.getTimer().startTime()), + state.getTimer().startTime(), + XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(state.getTimer().stopTime()), + state.getTimer().stopTime(), + new TimeValue(state.getTimer().time()), + state.getRecoverySource().getType().name().toLowerCase(Locale.ROOT), + state.getStage().name().toLowerCase(Locale.ROOT), + state.getSourceNode() == null ? "n/a" : state.getSourceNode().getHostName(), + state.getSourceNode() == null ? "n/a" : state.getSourceNode().getName(), + state.getTargetNode().getHostName(), + state.getTargetNode().getName(), + state.getRecoverySource() == null || state.getRecoverySource().getType() != RecoverySource.Type.SNAPSHOT ? + "n/a" : + ((SnapshotRecoverySource) state.getRecoverySource()).snapshot().getRepository(), + state.getRecoverySource() == null || state.getRecoverySource().getType() != RecoverySource.Type.SNAPSHOT ? 
+ "n/a" : + ((SnapshotRecoverySource) state.getRecoverySource()).snapshot().getSnapshotId().getName(), + state.getIndex().totalRecoverFiles(), + state.getIndex().recoveredFileCount(), + percent(state.getIndex().recoveredFilesPercent()), + state.getIndex().totalFileCount(), + state.getIndex().totalRecoverBytes(), + state.getIndex().recoveredBytes(), + percent(state.getIndex().recoveredBytesPercent()), + state.getIndex().totalBytes(), + state.getTranslog().totalOperations(), + state.getTranslog().recoveredOperations(), + percent(state.getTranslog().recoveredPercent())); + + final List cells = table.getRows().get(i); + for (int j = 0; j < expectedValues.size(); j++) { + assertThat(cells.get(j).value, equalTo(expectedValues.get(j))); + } } } From 0dd2fdfef2086d17e0ab0bcbf7fc1d662cdae9ca Mon Sep 17 00:00:00 2001 From: Jeff Hajewski Date: Thu, 28 Mar 2019 16:07:03 -0500 Subject: [PATCH 12/63] Update max dims for vectors to 1024. (#40597) --- .../org/elasticsearch/index/mapper/DenseVectorFieldMapper.java | 2 +- .../org/elasticsearch/index/mapper/SparseVectorFieldMapper.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java index ec78420cc0b05..d48a457ba08cd 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java @@ -46,7 +46,7 @@ public class DenseVectorFieldMapper extends FieldMapper implements ArrayValueMapperParser { public static final String CONTENT_TYPE = "dense_vector"; - public static short MAX_DIMS_COUNT = 500; //maximum allowed number of dimensions + public static short MAX_DIMS_COUNT = 1024; //maximum allowed number of dimensions private static final byte INT_BYTES = 4; public static class Defaults { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java index bfbf68c5a7662..931e27bc1c19f 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java @@ -46,7 +46,7 @@ public class SparseVectorFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "sparse_vector"; - public static short MAX_DIMS_COUNT = 500; //maximum allowed number of dimensions + public static short MAX_DIMS_COUNT = 1024; //maximum allowed number of dimensions public static int MAX_DIMS_NUMBER = 65535; //maximum allowed dimension's number public static class Defaults { From 0355bc1f0f502e9bae5fc26b2e96a29add8b38e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 28 Mar 2019 22:11:46 +0100 Subject: [PATCH 13/63] Fixing typo in test error message (#40611) --- .../xpack/ccr/action/TransportResumeFollowActionTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java index b9a3c7ed021f4..44f8583bb9b5a 100644 --- 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java @@ -238,7 +238,7 @@ public void testDynamicIndexSettingsAreClassified() { if (setting.isDynamic()) { boolean notReplicated = TransportResumeFollowAction.NON_REPLICATED_SETTINGS.contains(setting); boolean replicated = replicatedSettings.contains(setting); - assertThat("setting [" + setting.getKey() + "] is not classified as replicated xor not replicated", + assertThat("setting [" + setting.getKey() + "] is not classified as replicated or not replicated", notReplicated ^ replicated, is(true)); } } From e08881b54e79903aff0b07aceb112e0e38949afd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 28 Mar 2019 22:52:49 +0100 Subject: [PATCH 14/63] Muting XContentParserTests#testSubParserArray --- .../org/elasticsearch/common/xcontent/XContentParserTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java index e98f1e3d58510..0fe8a2b9f91fb 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.xcontent; import com.fasterxml.jackson.core.JsonParseException; + import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -368,6 +369,7 @@ public void testSubParserObject() throws IOException { } } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/40617") public void testSubParserArray() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); int numberOfArrayElements = randomInt(10); From d0acf6285cd7845e554fe988e279d101d5631758 Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Thu, 28 Mar 2019 16:06:06 -0700 Subject: [PATCH 15/63] lower bwc skip for search as you type (#40599) --- .../rest-api-spec/test/search-as-you-type/10_basic.yml | 2 +- .../rest-api-spec/test/search-as-you-type/20_highlighting.yml | 2 +- .../rest-api-spec/test/search/310_match_bool_prefix.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/10_basic.yml b/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/10_basic.yml index f9b76a7399a37..3ddcd89347fcb 100644 --- a/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/10_basic.yml +++ b/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/10_basic.yml @@ -1,6 +1,6 @@ setup: - skip: - version: " - 7.1.0" + version: " - 7.0.99" reason: "added in 7.1.0" - do: diff --git a/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml b/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml index 5a96a11a47586..82a599ce686c2 100644 --- a/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml +++ b/modules/mapper-extras/src/test/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml @@ -1,6 +1,6 @@ setup: - skip: - version: " - 7.1.0" + 
version: " - 7.0.99" reason: "added in 7.1.0" - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix.yml index 957d26036b4a8..bcc28c7853425 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix.yml @@ -1,6 +1,6 @@ setup: - skip: - version: " - 7.1.0" + version: " - 7.0.99" reason: "added in 7.1.0" - do: From 80b88cff3e3a73d30e4c11460ade6927efeac6be Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 29 Mar 2019 08:01:06 +0200 Subject: [PATCH 16/63] Fix 3rd pary S3 tests (#40588) * Fix 3rd pary S3 tests This is allready excluded on line 186, by doing this again here, the other exclusion from arround that line are removed causing the tests to fail. * Fix blacklisting with the fixture --- plugins/repository-s3/build.gradle | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index d27afb0861a98..8a2edeb78c507 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -177,6 +177,10 @@ if (useFixture) { } plugin file(tasks.bundlePlugin.archiveFile) } + + integTest.runner { + systemProperty 'tests.rest.blacklist', 'repository_s3/50_repository_ecs_credentials/*' + } } else { integTest.runner { systemProperty 'tests.rest.blacklist', @@ -259,10 +263,6 @@ testClusters.integTest { } } -integTest.runner { - systemProperty 'tests.rest.blacklist', 'repository_s3/50_repository_ecs_credentials/*' -} - if (useFixture) { task integTestECS(type: RestIntegTestTask.class) { description = "Runs tests using the ECS repository." From e48fe8b33d72e6f14e1d7f9fc392664e5d6faa48 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 29 Mar 2019 08:08:04 +0200 Subject: [PATCH 17/63] Run the build integ test in parallel (#39788) * Run the build integ test in parallel Because the randomized runner lives in buildSrc, we run these tests with the Gradle runner, and had no parallelism configured so far. * Handle Windows and "auto" better --- buildSrc/build.gradle | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 9f658c91ab394..be54b2c68f639 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -232,6 +232,11 @@ if (project != rootProject) { if (isLuceneSnapshot) { systemProperty 'test.lucene-snapshot-revision', isLuceneSnapshot[0][1] } + String defaultParallel = System.getProperty('tests.jvms', project.rootProject.ext.defaultParallel) + if (defaultParallel == "auto") { + defaultParallel = Math.max(Runtime.getRuntime().availableProcessors(), 4) + } + maxParallelForks defaultParallel as Integer } check.dependsOn(integTest) From 33af548df7c3fccef38eb8a0b34ee2a808800840 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 29 Mar 2019 08:10:14 +0200 Subject: [PATCH 18/63] Remove Gradle deprecation warnings (#40449) * Remove Gradle deprecation warnings Turns out that without the explicit project, some how the deprecated properties were being red. With this change we have no Gradle deprecation warnings. 
--- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index d452170b5fb90..f6c3222a4074a 100644 --- a/build.gradle +++ b/build.gradle @@ -32,7 +32,7 @@ plugins { id 'com.gradle.build-scan' version '2.0.2' id 'base' } -if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") { +if (Boolean.valueOf(project.findProperty('org.elasticsearch.acceptScanTOS') ?: "false")) { buildScan { termsOfServiceUrl = 'https://gradle.com/terms-of-service' termsOfServiceAgree = 'yes' From 7b6e7146eaacee36d3ec7932b85761ed01dd5ca8 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Fri, 29 Mar 2019 18:04:49 +1100 Subject: [PATCH 19/63] Remove obsolete security settings (#40496) Removes the deprecated accept_default_password setting. This setting became redundant when default passwords were removed in 6.0, but the setting was kept for BWC. Removes native role store cache settings. These have been unused since 5.2 but were kept for BWC. --- docs/reference/migration/migrate_8_0.asciidoc | 2 ++ .../migration/migrate_8_0/security.asciidoc | 18 ++++++++++++++++++ .../elasticsearch/xpack/security/Security.java | 1 - .../security/authc/esnative/ReservedRealm.java | 5 ----- .../security/authz/store/NativeRolesStore.java | 14 -------------- 5 files changed, 20 insertions(+), 20 deletions(-) create mode 100644 docs/reference/migration/migrate_8_0/security.asciidoc diff --git a/docs/reference/migration/migrate_8_0.asciidoc b/docs/reference/migration/migrate_8_0.asciidoc index 789a2d5fa0ce4..0c695a3b2bb47 100644 --- a/docs/reference/migration/migrate_8_0.asciidoc +++ b/docs/reference/migration/migrate_8_0.asciidoc @@ -15,6 +15,7 @@ coming[8.0.0] * <> * <> * <> +* <> //NOTE: The notable-breaking-changes tagged regions are re-used in the //Installation and Upgrade Guide @@ -41,3 +42,4 @@ include::migrate_8_0/analysis.asciidoc[] include::migrate_8_0/discovery.asciidoc[] include::migrate_8_0/mappings.asciidoc[] include::migrate_8_0/snapshots.asciidoc[] +include::migrate_8_0/security.asciidoc[] diff --git a/docs/reference/migration/migrate_8_0/security.asciidoc b/docs/reference/migration/migrate_8_0/security.asciidoc new file mode 100644 index 0000000000000..e09d21764f740 --- /dev/null +++ b/docs/reference/migration/migrate_8_0/security.asciidoc @@ -0,0 +1,18 @@ +[float] +[[breaking_80_security_changes]] +=== Security changes + +[float] +==== The `accept_default_password` setting has been removed + +The `xpack.security.authc.accept_default_password` setting has not had any effect +since the 6.0 release of {es}. It has been removed and cannot be used. + +[float] +==== The `roles.index.cache.*` settings have been removed + +The `xpack.security.authz.store.roles.index.cache.max_size` and +`xpack.security.authz.store.roles.index.cache.ttl` settings have +been removed. These settings have been redundant and deprecated +since the 5.2 release of {es}. 
+ diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 7b7e72fdd6b98..c6f269b1edd4d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -618,7 +618,6 @@ public static List> getSettings(boolean transportClientMode, List ACCEPT_DEFAULT_PASSWORD_SETTING = Setting.boolSetting( - SecurityField.setting("authc.accept_default_password"), true, Setting.Property.NodeScope, Setting.Property.Filtered, - Setting.Property.Deprecated); public static final Setting BOOTSTRAP_ELASTIC_PASSWORD = SecureSetting.secureString("bootstrap.password", KeyStoreWrapper.SEED_SETTING); @@ -250,7 +246,6 @@ private Version getDefinedVersion(String username) { } public static void addSettings(List> settingsList) { - settingsList.add(ACCEPT_DEFAULT_PASSWORD_SETTING); settingsList.add(BOOTSTRAP_ELASTIC_PASSWORD); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index b82bf7f3c7fc2..c0ec72277d870 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -26,10 +26,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -69,7 +66,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; -import static org.elasticsearch.xpack.core.security.SecurityField.setting; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ROLE_TYPE; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; @@ -83,11 +79,6 @@ */ public class NativeRolesStore implements BiConsumer, ActionListener> { - // these are no longer used, but leave them around for users upgrading - private static final Setting CACHE_SIZE_SETTING = - Setting.intSetting(setting("authz.store.roles.index.cache.max_size"), 10000, Property.NodeScope, Property.Deprecated); - private static final Setting CACHE_TTL_SETTING = Setting.timeSetting(setting("authz.store.roles.index.cache.ttl"), - TimeValue.timeValueMinutes(20), Property.NodeScope, Property.Deprecated); private static final Logger logger = LogManager.getLogger(NativeRolesStore.class); private final Settings settings; @@ -413,11 +404,6 @@ static RoleDescriptor transformRole(String id, BytesReference sourceBytes, Logge } } - public static void addSettings(List> settings) { - settings.add(CACHE_SIZE_SETTING); - settings.add(CACHE_TTL_SETTING); - } - /** * Gets the document's id field for the given role name. 
*/ From 1f392a7d63d74cb65b67c9441be0c45c54ede411 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Fri, 29 Mar 2019 18:06:47 +1100 Subject: [PATCH 20/63] Support roles with application privileges against wildcard applications (#40398) This commit introduces 2 changes to application privileges: - The validation rules now accept a wildcard in the "suffix" of an application name. Wildcards were always accepted in the application name, but the "valid filename" check for the suffix incorrectly prevented the use of wildcards there. - A role may now be defined against a wildcard application (e.g. kibana-*) and this will be correctly treated as granting the named privileges against all named applications. This does not allow wildcard application names in the body of a "has-privileges" check, but the "has-privileges" check can test concrete application names against roles with wildcards. --- .../permission/ApplicationPermission.java | 21 ++-- .../authz/privilege/ApplicationPrivilege.java | 41 +++++-- .../ApplicationPrivilegeDescriptor.java | 7 ++ .../ApplicationPermissionTests.java | 33 +++++- .../privilege/ApplicationPrivilegeTests.java | 34 ++++-- .../authz/store/CompositeRolesStore.java | 4 +- .../authz/store/NativePrivilegeStore.java | 58 +++++++++- .../store/NativePrivilegeStoreTests.java | 39 +++++++ .../privileges/20_has_application_privs.yml | 103 ++++++++++++++++++ 9 files changed, 302 insertions(+), 38 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index 0cd4e8a8b0ddc..da6af4ec7cbdc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -104,15 +104,18 @@ public ResourcePrivilegesMap checkResourcePrivileges(final String applicationNam for (String checkResource : checkForResources) { for (String checkPrivilegeName : checkForPrivilegeNames) { final Set nameSet = Collections.singleton(checkPrivilegeName); - final ApplicationPrivilege checkPrivilege = ApplicationPrivilege.get(applicationName, nameSet, storedPrivileges); - assert checkPrivilege.getApplication().equals(applicationName) : "Privilege " + checkPrivilege + " should have application " - + applicationName; - assert checkPrivilege.name().equals(nameSet) : "Privilege " + checkPrivilege + " should have name " + nameSet; - - if (grants(checkPrivilege, checkResource)) { - resourcePrivilegesMapBuilder.addResourcePrivilege(checkResource, checkPrivilegeName, Boolean.TRUE); - } else { - resourcePrivilegesMapBuilder.addResourcePrivilege(checkResource, checkPrivilegeName, Boolean.FALSE); + final Set checkPrivileges = ApplicationPrivilege.get(applicationName, nameSet, storedPrivileges); + logger.trace("Resolved privileges [{}] for [{},{}]", checkPrivileges, applicationName, nameSet); + for (ApplicationPrivilege checkPrivilege : checkPrivileges) { + assert Automatons.predicate(applicationName).test(checkPrivilege.getApplication()) : "Privilege " + checkPrivilege + + " should have application " + applicationName; + assert checkPrivilege.name().equals(nameSet) : "Privilege " + checkPrivilege + " should have name " + nameSet; + + if (grants(checkPrivilege, checkResource)) { + resourcePrivilegesMapBuilder.addResourcePrivilege(checkResource, 
checkPrivilegeName, Boolean.TRUE); + } else { + resourcePrivilegesMapBuilder.addResourcePrivilege(checkResource, checkPrivilegeName, Boolean.FALSE); + } } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilege.java index 13db17a63bb0d..c4460b000e6d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilege.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.security.authz.privilege; import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.core.security.support.Automatons; import java.util.Arrays; import java.util.Collection; @@ -15,6 +16,7 @@ import java.util.Objects; import java.util.Set; import java.util.function.Function; +import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -101,7 +103,7 @@ private static void validateApplicationName(String application, boolean allowWil if (allowWildcard == false) { throw new IllegalArgumentException("Application names may not contain '*' (found '" + application + "')"); } - if(application.equals("*")) { + if (application.equals("*")) { // this is allowed and short-circuiting here makes the later validation simpler return; } @@ -128,7 +130,10 @@ private static void validateApplicationName(String application, boolean allowWil } if (parts.length > 1) { - final String suffix = parts[1]; + String suffix = parts[1]; + if (allowWildcard && suffix.endsWith("*")) { + suffix = suffix.substring(0, suffix.length() - 1); + } if (Strings.validFileName(suffix) == false) { throw new IllegalArgumentException("An application name suffix may not contain any of the characters '" + Strings.collectionToDelimitedString(Strings.INVALID_FILENAME_CHARS, "") + "' (found '" + suffix + "')"); @@ -165,20 +170,38 @@ public static void validatePrivilegeOrActionName(String name) { } /** - * Finds or creates an application privileges with the provided names. + * Finds or creates a collection of application privileges with the provided names. + * If application is a wildcard, it will be expanded to all matching application names in {@code stored} * Each element in {@code name} may be the name of a stored privilege (to be resolved from {@code stored}, or a bespoke action pattern. 
*/ - public static ApplicationPrivilege get(String application, Set name, Collection stored) { + public static Set get(String application, Set name, Collection stored) { if (name.isEmpty()) { - return NONE.apply(application); + return Collections.singleton(NONE.apply(application)); + } else if (application.contains("*")) { + Predicate predicate = Automatons.predicate(application); + final Set result = stored.stream() + .map(ApplicationPrivilegeDescriptor::getApplication) + .filter(predicate) + .distinct() + .map(appName -> resolve(appName, name, stored)) + .collect(Collectors.toSet()); + if (result.isEmpty()) { + return Collections.singleton(resolve(application, name, Collections.emptyMap())); + } else { + return result; + } } else { - Map lookup = stored.stream() - .filter(apd -> apd.getApplication().equals(application)) - .collect(Collectors.toMap(ApplicationPrivilegeDescriptor::getName, Function.identity())); - return resolve(application, name, lookup); + return Collections.singleton(resolve(application, name, stored)); } } + private static ApplicationPrivilege resolve(String application, Set name, Collection stored) { + final Map lookup = stored.stream() + .filter(apd -> apd.getApplication().equals(application)) + .collect(Collectors.toMap(ApplicationPrivilegeDescriptor::getName, Function.identity())); + return resolve(application, name, lookup); + } + private static ApplicationPrivilege resolve(String application, Set names, Map lookup) { final int size = names.size(); if (size == 0) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptor.java index 85d6aad3e3560..613f64f93b54a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeDescriptor.java @@ -23,6 +23,8 @@ import java.util.Objects; import java.util.Set; +import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; + /** * An {@code ApplicationPrivilegeDescriptor} is a representation of a stored {@link ApplicationPrivilege}. * A user (via a role) can be granted an application privilege by name (e.g. ("myapp", "read"). @@ -104,6 +106,11 @@ public XContentBuilder toXContent(XContentBuilder builder, boolean includeTypeFi return builder.endObject(); } + @Override + public String toString() { + return getClass().getSimpleName() + "{[" + application + "],[" + name + "],[" + collectionToCommaDelimitedString(actions) + "]}"; + } + /** * Construct a new {@link ApplicationPrivilegeDescriptor} from XContent. 
* diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermissionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermissionTests.java index 992ca8db1b083..0f81b872b86d4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermissionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermissionTests.java @@ -13,15 +13,16 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; public class ApplicationPermissionTests extends ESTestCase { @@ -34,6 +35,7 @@ public class ApplicationPermissionTests extends ESTestCase { private ApplicationPrivilege app1Delete = storePrivilege("app1", "delete", "write/delete"); private ApplicationPrivilege app1Create = storePrivilege("app1", "create", "write/create"); private ApplicationPrivilege app2Read = storePrivilege("app2", "read", "read/*"); + private ApplicationPrivilege otherAppRead = storePrivilege("other-app", "read", "read/*"); private ApplicationPrivilege storePrivilege(String app, String name, String... patterns) { store.add(new ApplicationPrivilegeDescriptor(app, name, Sets.newHashSet(patterns), Collections.emptyMap())); @@ -104,6 +106,16 @@ public void testDoesNotMatchAcrossApplications() { assertThat(buildPermission(app1All, "*").grants(app2Read, "123"), equalTo(false)); } + public void testMatchingWithWildcardApplicationNames() { + final Set readAllApp = ApplicationPrivilege.get("app*", Collections.singleton("read"), store); + assertThat(buildPermission(readAllApp, "*").grants(app1Read, "123"), equalTo(true)); + assertThat(buildPermission(readAllApp, "foo/*").grants(app2Read, "foo/bar"), equalTo(true)); + + assertThat(buildPermission(readAllApp, "*").grants(app1Write, "123"), equalTo(false)); + assertThat(buildPermission(readAllApp, "foo/*").grants(app2Read, "bar/baz"), equalTo(false)); + assertThat(buildPermission(readAllApp, "*").grants(otherAppRead, "abc"), equalTo(false)); + } + public void testMergedPermissionChecking() { final ApplicationPrivilege app1ReadWrite = compositePrivilege("app1", app1Read, app1Write); final ApplicationPermission hasPermission = buildPermission(app1ReadWrite, "allow/*"); @@ -138,16 +150,27 @@ public void testInspectPermissionContents() { } private ApplicationPrivilege actionPrivilege(String appName, String... actions) { - return ApplicationPrivilege.get(appName, Sets.newHashSet(actions), Collections.emptyList()); + final Set privileges = ApplicationPrivilege.get(appName, Sets.newHashSet(actions), Collections.emptyList()); + assertThat(privileges, hasSize(1)); + return privileges.iterator().next(); } private ApplicationPrivilege compositePrivilege(String application, ApplicationPrivilege... 
children) { Set names = Stream.of(children).map(ApplicationPrivilege::name).flatMap(Set::stream).collect(Collectors.toSet()); - return ApplicationPrivilege.get(application, names, store); + final Set privileges = ApplicationPrivilege.get(application, names, store); + assertThat(privileges, hasSize(1)); + return privileges.iterator().next(); } - private ApplicationPermission buildPermission(ApplicationPrivilege privilege, String... resources) { - return new ApplicationPermission(singletonList(new Tuple<>(privilege, Sets.newHashSet(resources)))); + return buildPermission(Collections.singleton(privilege), resources); + } + + private ApplicationPermission buildPermission(Collection privileges, String... resources) { + final Set resourceSet = Sets.newHashSet(resources); + final List>> privilegesAndResources = privileges.stream() + .map(p -> new Tuple<>(p, resourceSet)) + .collect(Collectors.toList()); + return new ApplicationPermission(privilegesAndResources); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java index c65f06f05f957..cd917ed81f16e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ApplicationPrivilegeTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.hamcrest.Matchers; import org.junit.Assert; import java.util.Arrays; @@ -22,9 +23,11 @@ import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.iterableWithSize; public class ApplicationPrivilegeTests extends ESTestCase { @@ -59,6 +62,12 @@ public void testValidationOfApplicationName() { assertNoException(app, () -> ApplicationPrivilege.validateApplicationName(app)); assertNoException(app, () -> ApplicationPrivilege.validateApplicationNameOrWildcard(app)); } + + // wildcards in the suffix + for (String app : Arrays.asList("app1-*", "app1-foo*", "app1-.*", "app1-.foo.*", appNameWithSpecialChars + "*")) { + assertValidationFailure(app, "application name", () -> ApplicationPrivilege.validateApplicationName(app)); + assertNoException(app, () -> ApplicationPrivilege.validateApplicationNameOrWildcard(app)); + } } public void testValidationOfPrivilegeName() { @@ -101,16 +110,23 @@ public void testNonePrivilege() { } public void testGetPrivilegeByName() { - final ApplicationPrivilegeDescriptor descriptor = descriptor("my-app", "read", "data:read/*", "action:login"); + final ApplicationPrivilegeDescriptor myRead = descriptor("my-app", "read", "data:read/*", "action:login"); final ApplicationPrivilegeDescriptor myWrite = descriptor("my-app", "write", "data:write/*", "action:login"); final ApplicationPrivilegeDescriptor myAdmin = descriptor("my-app", "admin", "data:read/*", "action:*"); final ApplicationPrivilegeDescriptor yourRead = descriptor("your-app", "read", "data:read/*", "action:login"); - final Set stored = 
Sets.newHashSet(descriptor, myWrite, myAdmin, yourRead); + final Set stored = Sets.newHashSet(myRead, myWrite, myAdmin, yourRead); + + final Set myAppRead = ApplicationPrivilege.get("my-app", Collections.singleton("read"), stored); + assertThat(myAppRead, iterableWithSize(1)); + assertPrivilegeEquals(myAppRead.iterator().next(), myRead); - assertEqual(ApplicationPrivilege.get("my-app", Collections.singleton("read"), stored), descriptor); - assertEqual(ApplicationPrivilege.get("my-app", Collections.singleton("write"), stored), myWrite); + final Set myAppWrite = ApplicationPrivilege.get("my-app", Collections.singleton("write"), stored); + assertThat(myAppWrite, iterableWithSize(1)); + assertPrivilegeEquals(myAppWrite.iterator().next(), myWrite); - final ApplicationPrivilege readWrite = ApplicationPrivilege.get("my-app", Sets.newHashSet("read", "write"), stored); + final Set myReadWrite = ApplicationPrivilege.get("my-app", Sets.newHashSet("read", "write"), stored); + assertThat(myReadWrite, Matchers.hasSize(1)); + final ApplicationPrivilege readWrite = myReadWrite.iterator().next(); assertThat(readWrite.getApplication(), equalTo("my-app")); assertThat(readWrite.name(), containsInAnyOrder("read", "write")); assertThat(readWrite.getPatterns(), arrayContainingInAnyOrder("data:read/*", "data:write/*", "action:login")); @@ -124,10 +140,10 @@ public void testGetPrivilegeByName() { } } - private void assertEqual(ApplicationPrivilege myReadPriv, ApplicationPrivilegeDescriptor myRead) { - assertThat(myReadPriv.getApplication(), equalTo(myRead.getApplication())); - assertThat(getPrivilegeName(myReadPriv), equalTo(myRead.getName())); - assertThat(Sets.newHashSet(myReadPriv.getPatterns()), equalTo(myRead.getActions())); + private void assertPrivilegeEquals(ApplicationPrivilege privilege, ApplicationPrivilegeDescriptor descriptor) { + assertThat(privilege.getApplication(), equalTo(descriptor.getApplication())); + assertThat(privilege.name(), contains(descriptor.getName())); + assertThat(Sets.newHashSet(privilege.getPatterns()), equalTo(descriptor.getActions())); } private ApplicationPrivilegeDescriptor descriptor(String application, String name, String... 
actions) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index 2d1d4a98b4ba6..48659b8968661 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -402,8 +402,8 @@ public static void buildRoleFromDescriptors(Collection roleDescr .flatMap(Collection::stream) .collect(Collectors.toSet()); privilegeStore.getPrivileges(applicationNames, applicationPrivilegeNames, ActionListener.wrap(appPrivileges -> { - applicationPrivilegesMap.forEach((key, names) -> - builder.addApplicationPrivilege(ApplicationPrivilege.get(key.v1(), names, appPrivileges), key.v2())); + applicationPrivilegesMap.forEach((key, names) -> ApplicationPrivilege.get(key.v1(), names, appPrivileges) + .forEach(priv -> builder.addApplicationPrivilege(priv, key.v2()))); listener.onResponse(builder.build()); }, listener::onFailure)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 09c89752f8314..19694bb003314 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -34,9 +34,11 @@ import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.security.ScrollHelper; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest; @@ -46,6 +48,7 @@ import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.IOException; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -62,6 +65,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor.DOC_TYPE_VALUE; +import static org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor.Fields.APPLICATION; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; /** @@ -97,7 +101,7 @@ public void getPrivileges(Collection applications, Collection na listener.onResponse(Collections.emptyList()); } else if (frozenSecurityIndex.isAvailable() == false) { listener.onFailure(frozenSecurityIndex.getUnavailableReason()); - } else if (applications != null && applications.size() == 1 && names != null && names.size() == 1) { + } else if (isSinglePrivilegeMatch(applications, names)) { getPrivilege(Objects.requireNonNull(Iterables.get(applications, 0)), 
Objects.requireNonNull(Iterables.get(names, 0)), ActionListener.wrap(privilege -> listener.onResponse(privilege == null ? Collections.emptyList() : Collections.singletonList(privilege)), @@ -110,11 +114,14 @@ public void getPrivileges(Collection applications, Collection na if (isEmpty(applications) && isEmpty(names)) { query = typeQuery; } else if (isEmpty(names)) { - query = QueryBuilders.boolQuery().filter(typeQuery).filter( - QueryBuilders.termsQuery(ApplicationPrivilegeDescriptor.Fields.APPLICATION.getPreferredName(), applications)); + query = QueryBuilders.boolQuery().filter(typeQuery).filter(getApplicationNameQuery(applications)); } else if (isEmpty(applications)) { query = QueryBuilders.boolQuery().filter(typeQuery) - .filter(QueryBuilders.termsQuery(ApplicationPrivilegeDescriptor.Fields.NAME.getPreferredName(), names)); + .filter(getPrivilegeNameQuery(names)); + } else if (hasWildcard(applications)) { + query = QueryBuilders.boolQuery().filter(typeQuery) + .filter(getApplicationNameQuery(applications)) + .filter(getPrivilegeNameQuery(names)); } else { final String[] docIds = applications.stream() .flatMap(a -> names.stream().map(n -> toDocId(a, n))) @@ -139,6 +146,49 @@ public void getPrivileges(Collection applications, Collection na } } + private boolean isSinglePrivilegeMatch(Collection applications, Collection names) { + return applications != null && applications.size() == 1 && hasWildcard(applications) == false && names != null && names.size() == 1; + } + + private boolean hasWildcard(Collection applications) { + return applications.stream().anyMatch(n -> n.endsWith("*")); + } + + private QueryBuilder getPrivilegeNameQuery(Collection names) { + return QueryBuilders.termsQuery(ApplicationPrivilegeDescriptor.Fields.NAME.getPreferredName(), names); + } + + private QueryBuilder getApplicationNameQuery(Collection applications) { + if (applications.contains("*")) { + return QueryBuilders.existsQuery(APPLICATION.getPreferredName()); + } + final List rawNames = new ArrayList<>(applications.size()); + final List wildcardNames = new ArrayList<>(applications.size()); + for (String name : applications) { + if (name.endsWith("*")) { + wildcardNames.add(name); + } else { + rawNames.add(name); + } + } + + assert rawNames.isEmpty() == false || wildcardNames.isEmpty() == false; + + TermsQueryBuilder termsQuery = rawNames.isEmpty() ? 
null : QueryBuilders.termsQuery(APPLICATION.getPreferredName(), rawNames); + if (wildcardNames.isEmpty()) { + return termsQuery; + } + final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + if (termsQuery != null) { + boolQuery.filter(termsQuery); + } + for (String wildcard : wildcardNames) { + final String prefix = wildcard.substring(0, wildcard.length() - 1); + boolQuery.filter(QueryBuilders.prefixQuery(APPLICATION.getPreferredName(), prefix)); + } + return boolQuery; + } + private static boolean isEmpty(Collection collection) { return collection == null || collection.isEmpty(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 8f60b1d30523f..a833748854943 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -181,6 +181,45 @@ public void testGetPrivilegesByApplicationName() throws Exception { assertResult(sourcePrivileges, future); } + public void testGetPrivilegesByWildcardApplicationName() throws Exception { + final PlainActionFuture> future = new PlainActionFuture<>(); + store.getPrivileges(Arrays.asList("myapp-*", "yourapp"), null, future); + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(SearchRequest.class)); + SearchRequest request = (SearchRequest) requests.get(0); + assertThat(request.indices(), arrayContaining(SecurityIndexManager.SECURITY_INDEX_NAME)); + + final String query = Strings.toString(request.source().query()); + assertThat(query, containsString("{\"bool\":{\"filter\":[{\"terms\":{\"application\":[\"yourapp\"]")); + assertThat(query, containsString("{\"prefix\":{\"application\":{\"value\":\"myapp-\"")); + assertThat(query, containsString("{\"term\":{\"type\":{\"value\":\"application-privilege\"")); + + final SearchHit[] hits = new SearchHit[0]; + listener.get().onResponse(new SearchResponse(new SearchResponseSections( + new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, null, false, false, null, 1), + "_scrollId1", 1, 1, 0, 1, null, null)); + } + + public void testGetPrivilegesByStarApplicationName() throws Exception { + final PlainActionFuture> future = new PlainActionFuture<>(); + store.getPrivileges(Arrays.asList("*", "anything"), null, future); + assertThat(requests, iterableWithSize(1)); + assertThat(requests.get(0), instanceOf(SearchRequest.class)); + SearchRequest request = (SearchRequest) requests.get(0); + assertThat(request.indices(), arrayContaining(SecurityIndexManager.SECURITY_INDEX_NAME)); + + final String query = Strings.toString(request.source().query()); + assertThat(query, containsString("{\"exists\":{\"field\":\"application\"")); + assertThat(query, containsString("{\"term\":{\"type\":{\"value\":\"application-privilege\"")); + + final SearchHit[] hits = new SearchHit[0]; + listener.get().onResponse(new SearchResponse(new SearchResponseSections( + new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, null, false, false, null, 1), + "_scrollId1", 1, 1, 0, 1, null, null)); + } + public void testGetAllPrivileges() throws Exception { final List sourcePrivileges = Arrays.asList( new ApplicationPrivilegeDescriptor("app1", "admin", 
newHashSet("action:admin/*", "action:login", "data:read/*"), emptyMap()), diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml index 85ac286c3f025..eb92cc252b560 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml @@ -28,6 +28,16 @@ setup: "name": "write", "actions": [ "data:write/*" ] } + }, + "yourapp-v1" : { + "read": { + "actions": [ "data:read/*" ] + } + }, + "yourapp-v2" : { + "read": { + "actions": [ "data:read/*" ] + } } } @@ -83,6 +93,21 @@ setup: } ] } + - do: + security.put_role: + name: "yourapp_read_config" + body: > + { + "cluster": [], + "indices": [], + "applications": [ + { + "application": "yourapp-*", + "privileges": ["read"], + "resources": ["settings/*"] + } + ] + } # And a user for each role - do: @@ -101,6 +126,14 @@ setup: "password": "p@ssw0rd", "roles" : [ "myapp_engineering_write" ] } + - do: + security.put_user: + username: "your_read" + body: > + { + "password": "p@ssw0rd", + "roles" : [ "yourapp_read_config" ] + } --- teardown: @@ -109,6 +142,16 @@ teardown: application: myapp name: "user,read,write" ignore: 404 + - do: + security.delete_privileges: + application: yourapp-v1 + name: "read" + ignore: 404 + - do: + security.delete_privileges: + application: yourapp-v2 + name: "read" + ignore: 404 - do: security.delete_user: @@ -120,6 +163,11 @@ teardown: username: "eng_write" ignore: 404 + - do: + security.delete_user: + username: "your_read" + ignore: 404 + - do: security.delete_role: name: "myapp_engineering_read" @@ -129,6 +177,11 @@ teardown: security.delete_role: name: "myapp_engineering_write" ignore: 404 + + - do: + security.delete_role: + name: "yourapp_read_config" + ignore: 404 --- "Test has_privileges with application-privileges": - do: @@ -188,3 +241,53 @@ teardown: } } } + - do: + headers: { Authorization: "Basic eW91cl9yZWFkOnBAc3N3MHJk" } # your_read + security.has_privileges: + user: null + body: > + { + "application": [ + { + "application" : "yourapp-v1", + "resources" : [ "settings/host", "settings/port", "system/key" ], + "privileges" : [ "data:read/settings", "data:write/settings", "read", "write" ] + }, + { + "application" : "yourapp-v2", + "resources" : [ "settings/host" ], + "privileges" : [ "data:read/settings", "data:write/settings" ] + } + ] + } + + - match: { "username" : "your_read" } + - match: { "has_all_requested" : false } + - match: { "application": { + "yourapp-v1": { + "settings/host": { + "data:read/settings": true, + "data:write/settings": false, + "read": true, + "write": false + }, + "settings/port": { + "data:read/settings": true, + "data:write/settings": false, + "read": true, + "write": false + }, + "system/key": { + "data:read/settings": false, + "data:write/settings": false, + "read": false, + "write": false + } + }, + "yourapp-v2": { + "settings/host": { + "data:read/settings": true, + "data:write/settings": false, + } + } + } } From ae569a286dd0acb85152723eb207f7974bd27ea2 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 29 Mar 2019 09:01:36 +0100 Subject: [PATCH 21/63] Fix merging of search_as_you_type field mapper (#40593) The merge of the `search_as_you_type` field mapper uses the wrong prefix field and does not update the underlying field types. 
--- .../mapper/SearchAsYouTypeFieldMapper.java | 16 +++++- .../SearchAsYouTypeFieldMapperTests.java | 55 +++++++++++++++++++ 2 files changed, 68 insertions(+), 3 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java index 69948bf98a6ac..867e975e9f51c 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapper.java @@ -516,7 +516,7 @@ protected void parseCreateField(ParseContext context, List field @Override protected String contentType() { - return CONTENT_TYPE; + return "shingle"; } } @@ -663,6 +663,16 @@ public SearchAsYouTypeFieldMapper(String simpleName, this.maxShingleSize = maxShingleSize; } + @Override + public FieldMapper updateFieldType(Map fullNameToFieldType) { + SearchAsYouTypeFieldMapper fieldMapper = (SearchAsYouTypeFieldMapper) super.updateFieldType(fullNameToFieldType); + fieldMapper.prefixField = (PrefixFieldMapper) fieldMapper.prefixField.updateFieldType(fullNameToFieldType); + for (int i = 0; i < fieldMapper.shingleFields.length; i++) { + fieldMapper.shingleFields[i] = (ShingleFieldMapper) fieldMapper.shingleFields[i].updateFieldType(fullNameToFieldType); + } + return fieldMapper; + } + @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { final String value = context.externalValueSet() ? context.externalValue().toString() : context.parser().textOrNull(); @@ -692,10 +702,10 @@ protected void doMerge(Mapper mergeWith) { super.doMerge(mergeWith); SearchAsYouTypeFieldMapper mw = (SearchAsYouTypeFieldMapper) mergeWith; if (mw.maxShingleSize != maxShingleSize) { - throw new IllegalArgumentException("mapper [" + name() + "] has different maxShingleSize setting, current [" + throw new IllegalArgumentException("mapper [" + name() + "] has different [max_shingle_size] setting, current [" + this.maxShingleSize + "], merged [" + mw.maxShingleSize + "]"); } - this.prefixField = (PrefixFieldMapper) this.prefixField.merge(mw); + this.prefixField = (PrefixFieldMapper) this.prefixField.merge(mw.prefixField); ShingleFieldMapper[] shingleFieldMappers = new ShingleFieldMapper[mw.shingleFields.length]; for (int i = 0; i < shingleFieldMappers.length; i++) { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapperTests.java index 9ed43a9505624..4622b34ea1514 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SearchAsYouTypeFieldMapperTests.java @@ -71,6 +71,7 @@ import static java.util.Arrays.asList; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasProperty; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -180,6 +181,60 @@ public void testConfiguration() throws IOException { getShingleFieldMapper(defaultMapper, "a_field._4gram").fieldType(), 4, analyzerName, prefixFieldMapper.fieldType()); } + public void 
testSimpleMerge() throws IOException {
+        MapperService mapperService = createIndex("test").mapperService();
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
+                .startObject("_doc")
+                .startObject("properties")
+                .startObject("a_field")
+                .field("type", "search_as_you_type")
+                .field("analyzer", "standard")
+                .endObject()
+                .endObject()
+                .endObject().endObject());
+            DocumentMapper mapper = mapperService.merge("_doc",
+                new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
+        }
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
+                .startObject("_doc")
+                .startObject("properties")
+                .startObject("a_field")
+                .field("type", "search_as_you_type")
+                .field("analyzer", "standard")
+                .endObject()
+                .startObject("b_field")
+                .field("type", "text")
+                .endObject()
+                .endObject()
+                .endObject().endObject());
+            DocumentMapper mapper = mapperService.merge("_doc",
+                new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
+        }
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
+                .startObject("_doc")
+                .startObject("properties")
+                .startObject("a_field")
+                .field("type", "search_as_you_type")
+                .field("analyzer", "standard")
+                .field("max_shingle_size", "4")
+                .endObject()
+                .startObject("b_field")
+                .field("type", "text")
+                .endObject()
+                .endObject()
+                .endObject().endObject());
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> mapperService.merge("_doc",
+                    new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
+            assertThat(e.getMessage(), containsString("different [max_shingle_size]"));
+        }
+    }
+
     public void testIndexOptions() throws IOException {
         final String mapping = Strings.toString(XContentFactory.jsonBuilder()
             .startObject()

From 53df53ada80cd2249e20bf00a18defbf16aaa8b3 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Fri, 29 Mar 2019 09:34:51 +0100
Subject: [PATCH 22/63] Update docs for the DFR similarity (#40579)

The basic models `be`, `d` and `p` and the after effect `no` are not
available anymore in Lucene 8, but they are still listed in the 7.x
documentation. This change removes these references; they should also be
listed in the breaking changes of ES 7.0.

Closes #40264
---
 .../index-modules/similarity.asciidoc | 18 +++++-------------
 1 file changed, 5 insertions(+), 13 deletions(-)

diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc
index 014923d463cbd..ee6cf3958375b 100644
--- a/docs/reference/index-modules/similarity.asciidoc
+++ b/docs/reference/index-modules/similarity.asciidoc
@@ -92,22 +92,14 @@ from randomness] framework. This similarity has the following options:
 
 [horizontal]
 `basic_model`::
-    Possible values: {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelBE.html[`be`],
-    {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelD.html[`d`],
-    {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelG.html[`g`],
+    Possible values: {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelG.html[`g`],
     {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelIF.html[`if`],
-    {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelIn.html[`in`],
-    {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelIne.html[`ine`] and
-    {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelP.html[`p`].
-
-`be`, `d` and `p` should be avoided in practice as they might return scores that
-are equal to 0 or infinite with terms that do not meet the expected random
-distribution.
+    {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelIn.html[`in`] and
+    {lucene-core-javadoc}/org/apache/lucene/search/similarities/BasicModelIne.html[`ine`].
 
 `after_effect`::
-    Possible values: {lucene-core-javadoc}/org/apache/lucene/search/similarities/AfterEffect.NoAfterEffect.html[`no`],
-    {lucene-core-javadoc}/org/apache/lucene/search/similarities/AfterEffectB.html[`b`] and
-    {lucene-core-javadoc}/org/apache/lucene/search/similarities/AfterEffectL.html[`l`].
+    Possible values: {lucene-core-javadoc}/org/apache/lucene/search/similarities/AfterEffectB.html[`b`] and
+    {lucene-core-javadoc}/org/apache/lucene/search/similarities/AfterEffectL.html[`l`].
 
 `normalization`::
     Possible values: {lucene-core-javadoc}/org/apache/lucene/search/similarities/Normalization.NoNormalization.html[`no`],
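For reference, a sketch of an index-settings request that configures a DFR
similarity using only options that remain available in Lucene 8. The index
and similarity names are illustrative:

    PUT /my-index
    {
      "settings": {
        "index": {
          "similarity": {
            "my_similarity": {
              "type": "DFR",
              "basic_model": "g",
              "after_effect": "l",
              "normalization": "h2",
              "normalization.h2.c": "3.0"
            }
          }
        }
      }
    }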
From c106c783a0cc3d4b57386cd4e83ae7ad74ef13de Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Fri, 29 Mar 2019 09:42:59 +0100
Subject: [PATCH 23/63] Remove -Xlint exclusions in the ingest-common module.
 (#40505)

Fix the generics in processors extending AbstractStringProcessor and its
factory.

Relates to #40366
---
 modules/ingest-common/build.gradle                        | 3 ---
 .../ingest/common/AbstractStringProcessor.java            | 8 ++++----
 .../org/elasticsearch/ingest/common/BytesProcessor.java   | 2 +-
 .../org/elasticsearch/ingest/common/GsubProcessor.java    | 6 +++---
 .../elasticsearch/ingest/common/LowercaseProcessor.java   | 2 +-
 .../org/elasticsearch/ingest/common/TrimProcessor.java    | 2 +-
 .../elasticsearch/ingest/common/URLDecodeProcessor.java   | 2 +-
 .../elasticsearch/ingest/common/UppercaseProcessor.java   | 2 +-
 .../common/AbstractStringProcessorFactoryTestCase.java    | 8 ++++----
 .../ingest/common/AbstractStringProcessorTestCase.java    | 6 +++---
 .../elasticsearch/ingest/common/BytesProcessorTests.java  | 4 ++--
 .../ingest/common/GsubProcessorFactoryTests.java          | 2 +-
 .../elasticsearch/ingest/common/GsubProcessorTests.java   | 4 ++--
 .../ingest/common/LowercaseProcessorTests.java            | 4 ++--
 .../elasticsearch/ingest/common/TrimProcessorTests.java   | 4 ++--
 .../ingest/common/URLDecodeProcessorTests.java            | 4 ++--
 .../ingest/common/UppercaseProcessorTests.java            | 4 ++--
 17 files changed, 32 insertions(+), 35 deletions(-)

diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle
index 1681258e7c7ee..b6179eb852ae9 100644
--- a/modules/ingest-common/build.gradle
+++ b/modules/ingest-common/build.gradle
@@ -29,9 +29,6 @@ dependencies {
   compile project(':libs:dissect')
 }
 
-compileJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
-compileTestJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
-
 integTestCluster {
   module project(':modules:lang-painless')
 }
\ No newline at end of file
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java
index 792e5e4ebed2d..546519aa5f606 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java
@@ -80,8 +80,8 @@ protected Factory(String processorType) {
         }
 
         @Override
-        public
AbstractStringProcessor create(Map registry, String tag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(processorType, tag, config, "field"); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(processorType, tag, config, "ignore_missing", false); String targetField = ConfigurationUtils.readStringProperty(processorType, tag, config, "target_field", field); @@ -89,7 +89,7 @@ public AbstractStringProcessor create(Map registry, S return newProcessor(tag, config, field, ignoreMissing, targetField); } - protected abstract AbstractStringProcessor newProcessor(String processorTag, Map config, String field, - boolean ignoreMissing, String targetField); + protected abstract AbstractStringProcessor newProcessor(String processorTag, Map config, String field, + boolean ignoreMissing, String targetField); } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java index d07b56e1b3df5..8de75878f5fe5 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java @@ -27,7 +27,7 @@ * Processor that converts the content of string fields to the byte value. * Throws exception is the field is not of type string or can not convert to the numeric byte value */ -public final class BytesProcessor extends AbstractStringProcessor { +public final class BytesProcessor extends AbstractStringProcessor { public static final String TYPE = "bytes"; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java index 39553910692fc..9f3e656bba4b6 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java @@ -29,7 +29,7 @@ * Processor that allows to search for patterns in field content and replace them with corresponding string replacement. * Support fields of string type only, throws exception if a field is of a different type. */ -public final class GsubProcessor extends AbstractStringProcessor { +public final class GsubProcessor extends AbstractStringProcessor { public static final String TYPE = "gsub"; @@ -67,8 +67,8 @@ public Factory() { } @Override - protected AbstractStringProcessor newProcessor(String processorTag, Map config, String field, - boolean ignoreMissing, String targetField) { + protected GsubProcessor newProcessor(String processorTag, Map config, String field, + boolean ignoreMissing, String targetField) { String pattern = readStringProperty(TYPE, processorTag, config, "pattern"); String replacement = readStringProperty(TYPE, processorTag, config, "replacement"); Pattern searchPattern; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java index 4269cb05257f5..6c14dbdabba78 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java @@ -27,7 +27,7 @@ * Throws exception is the field is not of type string. 
*/ -public final class LowercaseProcessor extends AbstractStringProcessor { +public final class LowercaseProcessor extends AbstractStringProcessor { public static final String TYPE = "lowercase"; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java index 98fe1223e5391..d1b3c87785424 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java @@ -25,7 +25,7 @@ * Processor that trims the content of string fields. * Throws exception is the field is not of type string. */ -public final class TrimProcessor extends AbstractStringProcessor { +public final class TrimProcessor extends AbstractStringProcessor { public static final String TYPE = "trim"; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java index fb6c5acf98b24..fa9d377714ee9 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java @@ -26,7 +26,7 @@ /** * Processor that URL-decodes a string */ -public final class URLDecodeProcessor extends AbstractStringProcessor { +public final class URLDecodeProcessor extends AbstractStringProcessor { public static final String TYPE = "urldecode"; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java index 6c428627c7d72..4503bfc02f71e 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java @@ -26,7 +26,7 @@ * Processor that converts the content of string fields to uppercase. * Throws exception is the field is not of type string. 
*/ -public final class UppercaseProcessor extends AbstractStringProcessor { +public final class UppercaseProcessor extends AbstractStringProcessor { public static final String TYPE = "uppercase"; diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorFactoryTestCase.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorFactoryTestCase.java index 0465e24902842..ba6a2be73465e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorFactoryTestCase.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorFactoryTestCase.java @@ -37,7 +37,7 @@ protected Map modifyConfig(Map config) { return config; } - protected void assertProcessor(AbstractStringProcessor processor) {} + protected void assertProcessor(AbstractStringProcessor processor) {} public void testCreate() throws Exception { AbstractStringProcessor.Factory factory = newFactory(); @@ -47,7 +47,7 @@ public void testCreate() throws Exception { Map config = new HashMap<>(); config.put("field", fieldName); - AbstractStringProcessor processor = factory.create(null, processorTag, modifyConfig(config)); + AbstractStringProcessor processor = factory.create(null, processorTag, modifyConfig(config)); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo(fieldName)); assertThat(processor.isIgnoreMissing(), is(false)); @@ -64,7 +64,7 @@ public void testCreateWithIgnoreMissing() throws Exception { config.put("field", fieldName); config.put("ignore_missing", true); - AbstractStringProcessor processor = factory.create(null, processorTag, modifyConfig(config)); + AbstractStringProcessor processor = factory.create(null, processorTag, modifyConfig(config)); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo(fieldName)); assertThat(processor.isIgnoreMissing(), is(true)); @@ -82,7 +82,7 @@ public void testCreateWithTargetField() throws Exception { config.put("field", fieldName); config.put("target_field", targetFieldName); - AbstractStringProcessor processor = factory.create(null, processorTag, modifyConfig(config)); + AbstractStringProcessor processor = factory.create(null, processorTag, modifyConfig(config)); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo(fieldName)); assertThat(processor.isIgnoreMissing(), is(false)); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java index 4e4182bfdc891..f667f84e5d7b1 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java @@ -33,7 +33,7 @@ public abstract class AbstractStringProcessorTestCase extends ESTestCase { - protected abstract AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField); + protected abstract AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField); protected String modifyInput(String input) { return input; @@ -41,8 +41,8 @@ protected String modifyInput(String input) { protected abstract T expectedResult(String input); - protected Class expectedResultType(){ - return 
(Class) String.class; // most results types are Strings + protected Class expectedResultType(){ + return String.class; // most results types are Strings } public void testProcessor() throws Exception { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java index 788340a455a42..2520f3e5ad17f 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java @@ -29,12 +29,12 @@ import static org.hamcrest.Matchers.equalTo; -public class BytesProcessorTests extends AbstractStringProcessorTestCase { +public class BytesProcessorTests extends AbstractStringProcessorTestCase { private String modifiedInput; @Override - protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { + protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { return new BytesProcessor(randomAlphaOfLength(10), field, ignoreMissing, targetField); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java index 4a70b4686e0a6..0dadefbb4ee64 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java @@ -42,7 +42,7 @@ protected Map modifyConfig(Map config) { } @Override - protected void assertProcessor(AbstractStringProcessor processor) { + protected void assertProcessor(AbstractStringProcessor processor) { GsubProcessor gsubProcessor = (GsubProcessor) processor; assertThat(gsubProcessor.getPattern().toString(), equalTo("\\.")); assertThat(gsubProcessor.getReplacement(), equalTo("-")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java index 38d0202d3a1e2..9c003356c3dff 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java @@ -21,10 +21,10 @@ import java.util.regex.Pattern; -public class GsubProcessorTests extends AbstractStringProcessorTestCase { +public class GsubProcessorTests extends AbstractStringProcessorTestCase { @Override - protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { + protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { return new GsubProcessor(randomAlphaOfLength(10), field, Pattern.compile("\\."), "-", ignoreMissing, targetField); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java index 67a73669c0387..b804d3a0221c2 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java @@ -21,9 +21,9 @@ import java.util.Locale; -public 
class LowercaseProcessorTests extends AbstractStringProcessorTestCase { +public class LowercaseProcessorTests extends AbstractStringProcessorTestCase { @Override - protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { + protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { return new LowercaseProcessor(randomAlphaOfLength(10), field, ignoreMissing, targetField); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java index f0ae554f5cad1..abd7cae12fe91 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java @@ -19,10 +19,10 @@ package org.elasticsearch.ingest.common; -public class TrimProcessorTests extends AbstractStringProcessorTestCase { +public class TrimProcessorTests extends AbstractStringProcessorTestCase { @Override - protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { + protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { return new TrimProcessor(randomAlphaOfLength(10), field, ignoreMissing, targetField); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/URLDecodeProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/URLDecodeProcessorTests.java index 7697f1fcba3d4..150d594afd9af 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/URLDecodeProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/URLDecodeProcessorTests.java @@ -22,14 +22,14 @@ import java.io.UnsupportedEncodingException; import java.net.URLDecoder; -public class URLDecodeProcessorTests extends AbstractStringProcessorTestCase { +public class URLDecodeProcessorTests extends AbstractStringProcessorTestCase { @Override protected String modifyInput(String input) { return "Hello%20G%C3%BCnter" + input; } @Override - protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { + protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { return new URLDecodeProcessor(randomAlphaOfLength(10), field, ignoreMissing, targetField); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java index 76459f8116890..1b027c4380837 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java @@ -21,10 +21,10 @@ import java.util.Locale; -public class UppercaseProcessorTests extends AbstractStringProcessorTestCase { +public class UppercaseProcessorTests extends AbstractStringProcessorTestCase { @Override - protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { + protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { return new UppercaseProcessor(randomAlphaOfLength(10), field, ignoreMissing, targetField); } From 
963c9b8092a6ba95d85c7d97b8a2d43169832367 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 29 Mar 2019 09:44:42 +0100 Subject: [PATCH 24/63] Update ingest jdocs that a null return value will drop the current document. (#40359) --- server/src/main/java/org/elasticsearch/ingest/Pipeline.java | 3 +++ server/src/main/java/org/elasticsearch/ingest/Processor.java | 3 +++ 2 files changed, 6 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java index fc5311be5cbde..218713383227e 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -89,6 +89,9 @@ public static Pipeline create(String id, Map config, /** * Modifies the data of a document to be indexed based on the processor this pipeline holds + * + * If null is returned then this document will be dropped and not indexed, otherwise + * this document will be kept and indexed. */ public IngestDocument execute(IngestDocument ingestDocument) throws Exception { long startTimeInNanos = relativeTimeProvider.getAsLong(); diff --git a/server/src/main/java/org/elasticsearch/ingest/Processor.java b/server/src/main/java/org/elasticsearch/ingest/Processor.java index 92b08bba77bf7..c064ddb35a129 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Processor.java +++ b/server/src/main/java/org/elasticsearch/ingest/Processor.java @@ -39,6 +39,9 @@ public interface Processor { /** * Introspect and potentially modify the incoming data. + * + * @return If null is returned then the current document will be dropped and not be indexed, + * otherwise this document will be kept and indexed */ IngestDocument execute(IngestDocument ingestDocument) throws Exception; From 33917dd2c49fd7f0af20faa9eb24f9993efb2866 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 29 Mar 2019 12:07:02 +0200 Subject: [PATCH 25/63] Add ability to mute and mute flaky fixture (#40630) --- .../testfixtures/TestFixturesPlugin.java | 29 +++++++++++++++---- test/fixtures/krb5kdc-fixture/build.gradle | 3 ++ 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 57f77d6d1a256..3d64578e0147b 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -71,6 +71,8 @@ public void apply(Project project) { pullFixture.setEnabled(false); return; } + preProcessFixture.onlyIf(spec -> buildFixture.getEnabled()); + postProcessFixture.onlyIf(spec -> buildFixture.getEnabled()); project.apply(spec -> spec.plugin(BasePlugin.class)); project.apply(spec -> spec.plugin(DockerComposePlugin.class)); @@ -94,21 +96,26 @@ public void apply(Project project) { (name, port) -> postProcessFixture.getExtensions() .getByType(ExtraPropertiesExtension.class).set(name, port) ); + extension.fixtures.add(project); } - extension.fixtures.all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath())); + extension.fixtures + .matching(fixtureProject -> fixtureProject.equals(project) == false) + .all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath())); + + conditionTaskByType(tasks, extension, getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask")); + conditionTaskByType(tasks, 
extension, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask")); + conditionTaskByType(tasks, extension, TestingConventionsTasks.class); + conditionTaskByType(tasks, extension, ComposeUp.class); + if (dockerComposeSupported(project) == false) { project.getLogger().warn( "Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose " + "but none could be found so these will be skipped", project.getPath() ); - disableTaskByType(tasks, getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask")); - disableTaskByType(tasks, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask")); - // conventions are not honored when the tasks are disabled - disableTaskByType(tasks, TestingConventionsTasks.class); - disableTaskByType(tasks, ComposeUp.class); return; } + tasks.withType(getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask"), task -> extension.fixtures.all(fixtureProject -> { fixtureProject.getTasks().matching(it -> it.getName().equals("buildFixture")).all(buildFixture -> @@ -128,6 +135,16 @@ public void apply(Project project) { } + private void conditionTaskByType(TaskContainer tasks, TestFixtureExtension extension, Class taskClass) { + tasks.withType( + taskClass, + task -> task.onlyIf(spec -> + extension.fixtures.stream() + .anyMatch(fixtureProject -> fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false) == false + ) + ); + } + private void configureServiceInfoForTask(Task task, Project fixtureProject, BiConsumer consumer) { // Configure ports for the tests as system properties. // We only know these at execution time so we need to do it in doFirst diff --git a/test/fixtures/krb5kdc-fixture/build.gradle b/test/fixtures/krb5kdc-fixture/build.gradle index a3ca8d41bc4d9..1d224942f1862 100644 --- a/test/fixtures/krb5kdc-fixture/build.gradle +++ b/test/fixtures/krb5kdc-fixture/build.gradle @@ -47,6 +47,9 @@ postProcessFixture { } } +// https://github.com/elastic/elasticsearch/issues/40624 +buildFixture.enabled = false + project.ext.krb5Conf = { service -> file("$buildDir/shared/${service}/krb5.conf") } project.ext.krb5Keytabs = { service, fileName -> file("$buildDir/shared/${service}/keytabs/${fileName}") } From a99435d607a0b68f8c0dbcea7e93c60cc820a627 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 29 Mar 2019 14:13:18 +0200 Subject: [PATCH 26/63] Avoid building hdfs-fixure use an image that works instead Avoid the additional requirement for the debian package repos to be up, and depend on dockerhub only instead. --- test/fixtures/hdfs-fixture/Dockerfile | 8 -------- test/fixtures/hdfs-fixture/docker-compose.yml | 7 ++++--- 2 files changed, 4 insertions(+), 11 deletions(-) delete mode 100644 test/fixtures/hdfs-fixture/Dockerfile diff --git a/test/fixtures/hdfs-fixture/Dockerfile b/test/fixtures/hdfs-fixture/Dockerfile deleted file mode 100644 index b9d0e60b7d6d4..0000000000000 --- a/test/fixtures/hdfs-fixture/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM java:8-jre - -RUN apt-get update && apt-get install net-tools - -EXPOSE 9998 -EXPOSE 9999 - -CMD java -cp "/fixture:/fixture/*" hdfs.MiniHDFS /data \ No newline at end of file diff --git a/test/fixtures/hdfs-fixture/docker-compose.yml b/test/fixtures/hdfs-fixture/docker-compose.yml index 5bdc40b1f7246..ee86bf90fecee 100644 --- a/test/fixtures/hdfs-fixture/docker-compose.yml +++ b/test/fixtures/hdfs-fixture/docker-compose.yml @@ -2,10 +2,11 @@ version: '3' services: hdfs: hostname: hdfs.build.elastic.co - build: - context: . 
- dockerfile: Dockerfile + image: ubuntu:14.04 volumes: - ./build/fixture:/fixture ports: + # FIXME: Don't fix the host ports - "9999:9999" + - "9998:9999" + entrypoint: apt-get update && apt-get install net-tools && java -cp "/fixture:/fixture/*" hdfs.MiniHDFS /data From 789806f9387ebd7ee9cac80bd4eca287a6ec2aa0 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 29 Mar 2019 08:24:52 -0400 Subject: [PATCH 27/63] Add usage indicators for the bundled JDK (#40616) This commit adds indications whether or not a distribution is from the bundled JDK, and whether or not we are using the bundled JDK. --- distribution/archives/build.gradle | 4 +- distribution/build.gradle | 16 +++-- distribution/packages/build.gradle | 32 +++++----- distribution/src/bin/elasticsearch | 2 + distribution/src/bin/elasticsearch-env | 1 + distribution/src/bin/elasticsearch-env.bat | 1 + .../src/bin/elasticsearch-service.bat | 2 +- distribution/src/bin/elasticsearch.bat | 2 +- docs/reference/cluster/stats.asciidoc | 9 ++- .../cluster/stats/ClusterStatsNodes.java | 8 +++ .../elasticsearch/monitor/jvm/JvmInfo.java | 60 ++++++++++++++++--- .../ClusterStatsMonitoringDocTests.java | 4 ++ 12 files changed, 107 insertions(+), 34 deletions(-) diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 57d91567b6f55..f6b27b0a2dd26 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -54,10 +54,10 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla into('config') { dirMode 0750 fileMode 0660 - with configFiles(distributionType, oss) + with configFiles(distributionType, oss, jdk) } into('bin') { - with binFiles(distributionType, oss) + with binFiles(distributionType, oss, jdk) } if (jdk) { into('jdk') { diff --git a/distribution/build.gradle b/distribution/build.gradle index f3a59f06d0bdd..99a3e15986905 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -321,13 +321,13 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from project(':distribution').buildTransportModules } - configFiles = { distributionType, oss -> + configFiles = { distributionType, oss, jdk -> copySpec { with copySpec { // main config files, processed with distribution specific substitutions from '../src/config' exclude 'log4j2.properties' // this is handled separately below - MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk)) } if (oss) { from project(':distribution').buildOssLog4jConfig @@ -339,7 +339,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } - binFiles = { distributionType, oss -> + binFiles = { distributionType, oss, jdk -> copySpec { // non-windows files, for all distributions with copySpec { @@ -347,7 +347,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { exclude '*.exe' exclude '*.bat' eachFile { it.setMode(0755) } - MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk)) } // windows files, only for zip if (distributionType == 'zip') { @@ -355,7 +355,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from '../src/bin' include '*.bat' filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) - MavenFilteringHack.filter(it, 
expansionsForDistribution(distributionType, oss)) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk)) } with copySpec { from '../src/bin' @@ -452,7 +452,7 @@ task run(type: RunTask) { * */ subprojects { - ext.expansionsForDistribution = { distributionType, oss -> + ext.expansionsForDistribution = { distributionType, oss, jdk -> final String defaultHeapSize = "1g" final String packagingPathData = "path.data: /var/lib/elasticsearch" final String pathLogs = "/var/log/elasticsearch" @@ -545,6 +545,10 @@ subprojects { 'zip': 'zip' ], + 'es.bundled_jdk': [ + 'def': jdk ? 'true' : 'false' + ], + 'license.name': [ 'deb': oss ? 'ASL-2.0' : 'Elastic-License' ], diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index a7f178e4b8261..7fd8f03338cdf 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -61,23 +61,23 @@ buildscript { } } -void addProcessFilesTask(String type, boolean oss) { - String packagingFiles = "build/packaging/${ oss ? 'oss-' : ''}${type}" +void addProcessFilesTask(String type, boolean oss, boolean jdk) { + String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}" - String taskName = "process${oss ? 'Oss' : ''}${type.capitalize()}Files" + String taskName = "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files" task(taskName, type: Copy) { into packagingFiles with copySpec { from 'src/common' from "src/${type}" - MavenFilteringHack.filter(it, expansionsForDistribution(type, oss)) + MavenFilteringHack.filter(it, expansionsForDistribution(type, oss, jdk)) } into('etc/elasticsearch') { - with configFiles(type, oss) + with configFiles(type, oss, jdk) } - MavenFilteringHack.filter(it, expansionsForDistribution(type, oss)) + MavenFilteringHack.filter(it, expansionsForDistribution(type, oss, jdk)) doLast { // create empty dirs, we set the permissions when configuring the packages @@ -91,17 +91,21 @@ void addProcessFilesTask(String type, boolean oss) { } } } -addProcessFilesTask('deb', true) -addProcessFilesTask('deb', false) -addProcessFilesTask('rpm', true) -addProcessFilesTask('rpm', false) +addProcessFilesTask('deb', true, true) +addProcessFilesTask('deb', true, false) +addProcessFilesTask('deb', false, true) +addProcessFilesTask('deb', false, false) +addProcessFilesTask('rpm', true, true) +addProcessFilesTask('rpm', true, false) +addProcessFilesTask('rpm', false, true) +addProcessFilesTask('rpm', false, false) // Common configuration that is package dependent. This can't go in ospackage // since we have different templated files that need to be consumed, but the structure // is the same Closure commonPackageConfig(String type, boolean oss, boolean jdk) { return { - dependsOn "process${oss ? 'Oss' : ''}${type.capitalize()}Files" + dependsOn "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files" packageName "elasticsearch${oss ? '-oss' : ''}" arch (type == 'deb' ? 'amd64' : 'X86_64') // Follow elasticsearch's file naming convention @@ -110,7 +114,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) { String prefix = "${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}" destinationDir = file("${prefix}/build/distributions") - String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${type}" + String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${jdk ? 
'' : 'no-jdk-'}${type}" String scripts = "${packagingFiles}/scripts" preInstall file("${scripts}/preinst") @@ -125,7 +129,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) { // specify it again explicitly for copying common files into('/usr/share/elasticsearch') { into('bin') { - with binFiles(type, oss) + with binFiles(type, oss, jdk) } from(rootProject.projectDir) { include 'README.textile' @@ -202,7 +206,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) { createDirectoryEntry true fileType CONFIG | NOREPLACE } - String envFile = expansionsForDistribution(type, false)['path.env'] + String envFile = expansionsForDistribution(type, oss, jdk)['path.env'] configurationFile envFile into(new File(envFile).getParent()) { fileType CONFIG | NOREPLACE diff --git a/distribution/src/bin/elasticsearch b/distribution/src/bin/elasticsearch index 9bfaded3385a1..8bdea4950cb75 100755 --- a/distribution/src/bin/elasticsearch +++ b/distribution/src/bin/elasticsearch @@ -29,6 +29,7 @@ if ! echo $* | grep -E '(^-d |-d$| -d |--daemonize$|--daemonize )' > /dev/null; -Des.path.conf="$ES_PATH_CONF" \ -Des.distribution.flavor="$ES_DISTRIBUTION_FLAVOR" \ -Des.distribution.type="$ES_DISTRIBUTION_TYPE" \ + -Des.bundled_jdk="$ES_BUNDLED_JDK" \ -cp "$ES_CLASSPATH" \ org.elasticsearch.bootstrap.Elasticsearch \ "$@" @@ -40,6 +41,7 @@ else -Des.path.conf="$ES_PATH_CONF" \ -Des.distribution.flavor="$ES_DISTRIBUTION_FLAVOR" \ -Des.distribution.type="$ES_DISTRIBUTION_TYPE" \ + -Des.bundled_jdk="$ES_BUNDLED_JDK" \ -cp "$ES_CLASSPATH" \ org.elasticsearch.bootstrap.Elasticsearch \ "$@" \ diff --git a/distribution/src/bin/elasticsearch-env b/distribution/src/bin/elasticsearch-env index 0e7f591adace3..2a490622b34b4 100644 --- a/distribution/src/bin/elasticsearch-env +++ b/distribution/src/bin/elasticsearch-env @@ -82,6 +82,7 @@ ES_PATH_CONF=`cd "$ES_PATH_CONF"; pwd` ES_DISTRIBUTION_FLAVOR=${es.distribution.flavor} ES_DISTRIBUTION_TYPE=${es.distribution.type} +ES_BUNDLED_JDK=${es.bundled_jdk} if [ -z "$ES_TMPDIR" ]; then ES_TMPDIR=`"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.TempDirectory` diff --git a/distribution/src/bin/elasticsearch-env.bat b/distribution/src/bin/elasticsearch-env.bat index baf65a0465c5c..bd34880e40ece 100644 --- a/distribution/src/bin/elasticsearch-env.bat +++ b/distribution/src/bin/elasticsearch-env.bat @@ -27,6 +27,7 @@ for %%I in ("%ES_PATH_CONF%..") do set ES_PATH_CONF=%%~dpfI set ES_DISTRIBUTION_FLAVOR=${es.distribution.flavor} set ES_DISTRIBUTION_TYPE=${es.distribution.type} +set ES_BUNDLED_JDK=${es.bundled_jdk} cd /d "%ES_HOME%" diff --git a/distribution/src/bin/elasticsearch-service.bat b/distribution/src/bin/elasticsearch-service.bat index 2886d6f685687..7a0be55c4f565 100644 --- a/distribution/src/bin/elasticsearch-service.bat +++ b/distribution/src/bin/elasticsearch-service.bat @@ -167,7 +167,7 @@ if "%JVM_SS%" == "" ( goto:eof ) -set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.path.conf="%ES_PATH_CONF%";-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%";-Des.distribution.type="%ES_DISTRIBUTION_TYPE%" +set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.path.conf="%ES_PATH_CONF%";-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%";-Des.distribution.type="%ES_DISTRIBUTION_TYPE%";-Des.bundled_jdk="%ES_BUNDLED_JDK%" if "%ES_START_TYPE%" == "" set ES_START_TYPE=manual if "%ES_STOP_TIMEOUT%" == "" set ES_STOP_TIMEOUT=0 diff --git a/distribution/src/bin/elasticsearch.bat b/distribution/src/bin/elasticsearch.bat index 
975af52e39523..7df6f19fc0765 100644 --- a/distribution/src/bin/elasticsearch.bat +++ b/distribution/src/bin/elasticsearch.bat @@ -50,7 +50,7 @@ if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" ( exit /b 1 ) -%JAVA% %ES_JAVA_OPTS% -Delasticsearch -Des.path.home="%ES_HOME%" -Des.path.conf="%ES_PATH_CONF%" -Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" -Des.distribution.type="%ES_DISTRIBUTION_TYPE%" -cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" !newparams! +%JAVA% %ES_JAVA_OPTS% -Delasticsearch -Des.path.home="%ES_HOME%" -Des.path.conf="%ES_PATH_CONF%" -Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" -Des.distribution.type="%ES_DISTRIBUTION_TYPE%" -Des.bundled_jdk="%ES_BUNDLED_JDK%" -cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" !newparams! endlocal endlocal diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 83515e32bf046..38028d8cf109f 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -155,10 +155,12 @@ Will return, for example: "max_uptime_in_millis": 13737, "versions": [ { - "version": "1.8.0_74", - "vm_name": "Java HotSpot(TM) 64-Bit Server VM", - "vm_version": "25.74-b02", + "version": "12", + "vm_name": "OpenJDK 64-Bit Server VM", + "vm_version": "12+33", "vm_vendor": "Oracle Corporation", + "bundled_jdk": true, + "using_bundled_jdk": true, "count": 1 } ], @@ -200,6 +202,7 @@ Will return, for example: // TESTRESPONSE[s/"plugins": \[[^\]]*\]/"plugins": $body.$_path/] // TESTRESPONSE[s/"network_types": \{[^\}]*\}/"network_types": $body.$_path/] // TESTRESPONSE[s/"discovery_types": \{[^\}]*\}/"discovery_types": $body.$_path/] +// TESTRESPONSE[s/: true|false/: $body.$_path/] // TESTRESPONSE[s/: (\-)?[0-9]+/: $body.$_path/] // TESTRESPONSE[s/: "[^"]*"/: $body.$_path/] // These replacements do a few things: diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index 6597cdf30f5e7..3b3fa480326ae 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -502,6 +502,8 @@ static final class Fields { static final String VM_NAME = "vm_name"; static final String VM_VERSION = "vm_version"; static final String VM_VENDOR = "vm_vendor"; + static final String BUNDLED_JDK = "bundled_jdk"; + static final String USING_BUNDLED_JDK = "using_bundled_jdk"; static final String COUNT = "count"; static final String THREADS = "threads"; static final String MAX_UPTIME = "max_uptime"; @@ -524,6 +526,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) builder.field(Fields.VM_NAME, v.key.vmName); builder.field(Fields.VM_VERSION, v.key.vmVersion); builder.field(Fields.VM_VENDOR, v.key.vmVendor); + builder.field(Fields.BUNDLED_JDK, v.key.bundledJdk); + builder.field(Fields.USING_BUNDLED_JDK, v.key.usingBundledJdk); builder.field(Fields.COUNT, v.value); builder.endObject(); } @@ -543,12 +547,16 @@ public static class JvmVersion { String version; String vmName; String vmVersion; String vmVendor; + boolean bundledJdk; + Boolean usingBundledJdk; JvmVersion(JvmInfo jvmInfo) { version = jvmInfo.version(); vmName = jvmInfo.getVmName(); vmVersion = jvmInfo.getVmVersion(); vmVendor = jvmInfo.getVmVendor(); + bundledJdk = jvmInfo.getBundledJdk(); + usingBundledJdk =
jvmInfo.getUsingBundledJdk(); } @Override diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index f24acc9c034e9..e8380a77962d4 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -19,6 +19,10 @@ package org.elasticsearch.monitor.jvm; +import org.elasticsearch.Version; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -150,10 +154,24 @@ public class JvmInfo implements Writeable, ToXContentFragment { } + final boolean bundledJdk = Booleans.parseBoolean(System.getProperty("es.bundled_jdk", Boolean.FALSE.toString())); + final Boolean usingBundledJdk = bundledJdk ? usingBundledJdk() : null; + INSTANCE = new JvmInfo(JvmPid.getPid(), System.getProperty("java.version"), runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), - runtimeMXBean.getVmVendor(), runtimeMXBean.getStartTime(), configuredInitialHeapSize, configuredMaxHeapSize, - mem, inputArguments, bootClassPath, classPath, systemProperties, gcCollectors, memoryPools, onError, onOutOfMemoryError, - useCompressedOops, useG1GC, useSerialGC); + runtimeMXBean.getVmVendor(), bundledJdk, usingBundledJdk, runtimeMXBean.getStartTime(), configuredInitialHeapSize, + configuredMaxHeapSize, mem, inputArguments, bootClassPath, classPath, systemProperties, gcCollectors, memoryPools, onError, + onOutOfMemoryError, useCompressedOops, useG1GC, useSerialGC); + } + + @SuppressForbidden(reason = "PathUtils#get") + private static boolean usingBundledJdk() { + /* + * We are using the bundled JDK if java.home is the jdk sub-directory of our working directory. This is because we always set + * the working directory of Elasticsearch to home, and the bundled JDK is in the jdk sub-directory there. 
+ */ + final String javaHome = System.getProperty("java.home"); + final String userDir = System.getProperty("user.dir"); + return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk").toAbsolutePath()); } public static JvmInfo jvmInfo() { @@ -170,6 +188,8 @@ public static JvmInfo jvmInfo() { private final String vmName; private final String vmVersion; private final String vmVendor; + private final boolean bundledJdk; + private final Boolean usingBundledJdk; private final long startTime; private final long configuredInitialHeapSize; private final long configuredMaxHeapSize; @@ -186,15 +206,18 @@ public static JvmInfo jvmInfo() { private final String useG1GC; private final String useSerialGC; - private JvmInfo(long pid, String version, String vmName, String vmVersion, String vmVendor, long startTime, - long configuredInitialHeapSize, long configuredMaxHeapSize, Mem mem, String[] inputArguments, String bootClassPath, - String classPath, Map systemProperties, String[] gcCollectors, String[] memoryPools, String onError, - String onOutOfMemoryError, String useCompressedOops, String useG1GC, String useSerialGC) { + private JvmInfo(long pid, String version, String vmName, String vmVersion, String vmVendor, boolean bundledJdk, Boolean usingBundledJdk, + long startTime, long configuredInitialHeapSize, long configuredMaxHeapSize, Mem mem, String[] inputArguments, + String bootClassPath, String classPath, Map systemProperties, String[] gcCollectors, + String[] memoryPools, String onError, String onOutOfMemoryError, String useCompressedOops, String useG1GC, + String useSerialGC) { this.pid = pid; this.version = version; this.vmName = vmName; this.vmVersion = vmVersion; this.vmVendor = vmVendor; + this.bundledJdk = bundledJdk; + this.usingBundledJdk = usingBundledJdk; this.startTime = startTime; this.configuredInitialHeapSize = configuredInitialHeapSize; this.configuredMaxHeapSize = configuredMaxHeapSize; @@ -218,6 +241,13 @@ public JvmInfo(StreamInput in) throws IOException { vmName = in.readString(); vmVersion = in.readString(); vmVendor = in.readString(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + bundledJdk = in.readBoolean(); + usingBundledJdk = in.readOptionalBoolean(); + } else { + bundledJdk = false; + usingBundledJdk = null; + } startTime = in.readLong(); inputArguments = new String[in.readInt()]; for (int i = 0; i < inputArguments.length; i++) { @@ -246,6 +276,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(vmName); out.writeString(vmVersion); out.writeString(vmVendor); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeBoolean(bundledJdk); + out.writeOptionalBoolean(usingBundledJdk); + } out.writeLong(startTime); out.writeInt(inputArguments.length); for (String inputArgument : inputArguments) { @@ -360,6 +394,14 @@ public String getVmVendor() { return this.vmVendor; } + public boolean getBundledJdk() { + return bundledJdk; + } + + public Boolean getUsingBundledJdk() { + return usingBundledJdk; + } + public long getStartTime() { return this.startTime; } @@ -436,6 +478,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.VM_NAME, vmName); builder.field(Fields.VM_VERSION, vmVersion); builder.field(Fields.VM_VENDOR, vmVendor); + builder.field(Fields.BUNDLED_JDK, bundledJdk); + builder.field(Fields.USING_BUNDLED_JDK, usingBundledJdk); builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime); builder.startObject(Fields.MEM); @@ -464,6 +508,8 @@ static 
final class Fields { static final String VM_NAME = "vm_name"; static final String VM_VERSION = "vm_version"; static final String VM_VENDOR = "vm_vendor"; + static final String BUNDLED_JDK = "bundled_jdk"; + static final String USING_BUNDLED_JDK = "using_bundled_jdk"; static final String START_TIME = "start_time"; static final String START_TIME_IN_MILLIS = "start_time_in_millis"; diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index fbb923c3f2f69..e436484bceba4 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -265,6 +265,8 @@ public void testToXContent() throws IOException { when(mockJvmInfo.getVmName()).thenReturn("_jvm_vm_name"); when(mockJvmInfo.getVmVersion()).thenReturn("_jvm_vm_version"); when(mockJvmInfo.getVmVendor()).thenReturn("_jvm_vm_vendor"); + when(mockJvmInfo.getBundledJdk()).thenReturn(true); + when(mockJvmInfo.getUsingBundledJdk()).thenReturn(true); final NodeStats mockNodeStats = mock(NodeStats.class); when(mockNodeStats.getTimestamp()).thenReturn(0L); @@ -481,6 +483,8 @@ public void testToXContent() throws IOException { + "\"vm_name\":\"_jvm_vm_name\"," + "\"vm_version\":\"_jvm_vm_version\"," + "\"vm_vendor\":\"_jvm_vm_vendor\"," + + "\"bundled_jdk\":true," + + "\"using_bundled_jdk\":true," + "\"count\":1" + "}" + "]," From d96e71d35e7accea12750a592fd1f44f64485fb5 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 29 Mar 2019 06:04:46 -0700 Subject: [PATCH 28/63] Add docs for bundled jdk (#40487) This commit changes the note in docs about required java version to note the existence of the bundled jdk and how to bring your own java. It also reorganizes the zip/targz docs as zip is no longer suitable on Linux/MacOS. --- docs/reference/getting-started.asciidoc | 27 ++++--- docs/reference/setup.asciidoc | 21 +++--- .../important-settings/error-file.asciidoc | 2 +- .../heap-dump-path.asciidoc | 2 +- docs/reference/setup/install.asciidoc | 15 ++-- docs/reference/setup/install/deb.asciidoc | 6 +- docs/reference/setup/install/rpm.asciidoc | 8 +- ...-daemon.asciidoc => targz-daemon.asciidoc} | 0 ...gz-start.asciidoc => targz-start.asciidoc} | 0 .../{zip-targz.asciidoc => targz.asciidoc} | 74 +++++++++---------- docs/reference/setup/install/windows.asciidoc | 6 +- .../setup/install/zip-windows.asciidoc | 8 +- docs/reference/setup/starting.asciidoc | 6 +- 13 files changed, 87 insertions(+), 88 deletions(-) rename docs/reference/setup/install/{zip-targz-daemon.asciidoc => targz-daemon.asciidoc} (100%) rename docs/reference/setup/install/{zip-targz-start.asciidoc => targz-start.asciidoc} (100%) rename docs/reference/setup/install/{zip-targz.asciidoc => targz.asciidoc} (74%) diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 9e7571e038315..6e112559f8771 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -114,26 +114,25 @@ https://www.elastic.co/cloud/elasticsearch-service/signup[Try out the Elasticsearch Service for free]. ============== -Elasticsearch requires at least Java 8. 
Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed): +NOTE: Elasticsearch includes a bundled version of http://openjdk.java.net[OpenJDK] +from the JDK maintainers (GPLv2+CE). To use your own version of Java, +see the <<jvm-version>> -[source,sh] --------------------------------------------------- -java -version -echo $JAVA_HOME --------------------------------------------------- - -Once we have Java set up, we can then download and run Elasticsearch. The binaries are available from http://www.elastic.co/downloads[`www.elastic.co/downloads`] along with all the releases that have been made in the past. For each release, you have a choice among a `zip` or `tar` archive, a `DEB` or `RPM` package, or a Windows `MSI` installation package. +The binaries are available from http://www.elastic.co/downloads[`www.elastic.co/downloads`] +along with all the releases that have been made in the past. For each release, platform +dependent archive versions are available for Windows, Linux and MacOS, as well as `DEB` and `RPM` +packages for Linux, and `MSI` installation packages for Windows. [float] -=== Installation example with tar +=== Installation example on Linux -For simplicity, let's use the {ref}/zip-targz.html[tar] file. +For simplicity, let's use the {ref}/targz.html[tar] file. -Let's download the Elasticsearch {version} tar as follows: +Let's download the Elasticsearch {version} Linux tar as follows: ["source","sh",subs="attributes,callouts"] -------------------------------------------------- -curl -L -O https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}.tar.gz +curl -L -O https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-linux-x86_64.tar.gz -------------------------------------------------- // NOTCONSOLE Then extract it as follows: ["source","sh",subs="attributes,callouts"] -------------------------------------------------- -tar -xvf elasticsearch-{version}.tar.gz +tar -xvf elasticsearch-{version}-linux-x86_64.tar.gz -------------------------------------------------- It will then create a bunch of files and folders in your current directory. We then go into the bin directory as follows: @@ -172,7 +171,7 @@ Then double-click the downloaded file to launch the GUI. Within the first screen image::images/msi_installer/msi_installer_locations.png[] Then select whether to install as a service or start Elasticsearch manually as needed. -To align with the tar example, choose not to install as a service: +To align with the Linux example, choose not to install as a service: [[getting-started-msi-installer-service]] image::images/msi_installer/msi_installer_no_service.png[] diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index c5cc25471264c..d54941ed6aad5 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -23,20 +23,19 @@ platforms, but it is possible that it will work on other platforms too.
[[jvm-version]] == Java (JVM) Version -Elasticsearch is built using Java, and requires at least -http://www.oracle.com/technetwork/java/javase/downloads/index.html[Java {jdk_major}] -in order to run. Only Oracle's Java and the OpenJDK are supported. The same JVM -version should be used on all Elasticsearch nodes and clients. - -We recommend installing Java version *{jdk} or a later version in the Java -{jdk_major} release series*. We recommend using a -link:/support/matrix[supported] +Elasticsearch is built using Java, and includes a bundled version of +http://openjdk.java.net[OpenJDK] from the JDK maintainers (GPLv2+CE) +within each distribution. The bundled JVM exists within the `jdk` directory of +the Elasticsearch home directory. + +To use your own version of Java, set the `JAVA_HOME` environment variable. +When using your own version, the bundled JVM directory may be removed. +If not using the bundled JVM, we recommend installing Java version + *{jdk} or a later version in the Java {jdk_major} release series*. We recommend +using a link:/support/matrix[supported] http://www.oracle.com/technetwork/java/eol-135779.html[LTS version of Java]. Elasticsearch will refuse to start if a known-bad version of Java is used. -The version of Java that Elasticsearch will use can be configured by setting -the `JAVA_HOME` environment variable. - -- include::setup/install.asciidoc[] diff --git a/docs/reference/setup/important-settings/error-file.asciidoc b/docs/reference/setup/important-settings/error-file.asciidoc index d58a752ac28fa..d78b0e24ec7ad 100644 --- a/docs/reference/setup/important-settings/error-file.asciidoc +++ b/docs/reference/setup/important-settings/error-file.asciidoc @@ -5,7 +5,7 @@ By default, Elasticsearch configures the JVM to write fatal error logs to the default logging directory (this is `/var/log/elasticsearch` for the <<deb>> and <<rpm>> package distributions, and the `logs` directory under the root of the Elasticsearch installation for the -<<zip-targz>> archive distributions). These are logs +<<targz>> and <<zip-windows>> archive distributions). These are logs produced by the JVM when it encounters a fatal error (e.g., a segmentation fault). If this path is not suitable for receiving logs, you should modify the entry `-XX:ErrorFile=...` in diff --git a/docs/reference/setup/important-settings/heap-dump-path.asciidoc b/docs/reference/setup/important-settings/heap-dump-path.asciidoc index fb8c7ff35f0d0..25f3fbcebcbf3 100644 --- a/docs/reference/setup/important-settings/heap-dump-path.asciidoc +++ b/docs/reference/setup/important-settings/heap-dump-path.asciidoc @@ -5,7 +5,7 @@ By default, Elasticsearch configures the JVM to dump the heap on out of memory exceptions to the default data directory (this is `/var/lib/elasticsearch` for the <<deb>> and <<rpm>> package distributions, and the `data` directory under the root of the -Elasticsearch installation for the <<zip-targz>> archive +Elasticsearch installation for the <<targz>> and <<zip-windows>> archive distributions). If this path is not suitable for receiving heap dumps, you should modify the entry `-XX:HeapDumpPath=...` in <<jvm-options>>. If you specify a directory, the JVM diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 26a207824af01..f5dd418155d55 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -16,12 +16,17 @@ Elasticsearch Service for free].
Elasticsearch is provided in the following package formats: [horizontal] -`zip`/`tar.gz`:: +Linux and MacOS `tar.gz` archives:: -The `zip` and `tar.gz` packages are suitable for installation on any system -and are the easiest choice for getting started with Elasticsearch on most systems. +The `tar.gz` archives are available for installation on any Linux distribution and MacOS. + -<<zip-targz>> or <<zip-windows>> +<<targz>> + +Windows `.zip` archive:: + +The `zip` archive is suitable for installation on Windows. ++ +<<zip-windows>> `deb`:: @@ -69,7 +74,7 @@ Chef:: https://github.com/elastic/cookbook-elasticsearch[cookbook-elasticsearc Ansible:: https://github.com/elastic/ansible-elasticsearch[ansible-elasticsearch] -include::install/zip-targz.asciidoc[] +include::install/targz.asciidoc[] include::install/zip-windows.asciidoc[] diff --git a/docs/reference/setup/install/deb.asciidoc b/docs/reference/setup/install/deb.asciidoc index 97b4762338936..192737581bd57 100644 --- a/docs/reference/setup/install/deb.asciidoc +++ b/docs/reference/setup/install/deb.asciidoc @@ -11,9 +11,9 @@ The latest stable version of Elasticsearch can be found on the link:/downloads/elasticsearch[Download Elasticsearch] page. Other versions can be found on the link:/downloads/past-releases[Past Releases page]. -NOTE: Elasticsearch requires Java 8 or later. Use the -http://www.oracle.com/technetwork/java/javase/downloads/index.html[official Oracle distribution] -or an open-source distribution such as http://openjdk.java.net[OpenJDK]. +NOTE: Elasticsearch includes a bundled version of http://openjdk.java.net[OpenJDK] +from the JDK maintainers (GPLv2+CE). To use your own version of Java, +see the <<jvm-version>> [[deb-key]] ==== Import the Elasticsearch PGP Key diff --git a/docs/reference/setup/install/rpm.asciidoc b/docs/reference/setup/install/rpm.asciidoc index a450e202b6896..c6f5647d08e26 100644 --- a/docs/reference/setup/install/rpm.asciidoc +++ b/docs/reference/setup/install/rpm.asciidoc @@ -7,7 +7,7 @@ Elasticsearch on any RPM-based system such as OpenSuSE, SLES, Centos, Red Hat, and Oracle Enterprise. NOTE: RPM install is not supported on distributions with old versions of RPM, -such as SLES 11 and CentOS 5. Please see <<zip-targz>> instead. +such as SLES 11 and CentOS 5. Please see <<targz>> instead. include::license.asciidoc[] @@ -15,9 +15,9 @@ The latest stable version of Elasticsearch can be found on the link:/downloads/elasticsearch[Download Elasticsearch] page. Other versions can be found on the link:/downloads/past-releases[Past Releases page]. -NOTE: Elasticsearch requires Java 8 or later. Use the -http://www.oracle.com/technetwork/java/javase/downloads/index.html[official Oracle distribution] -or an open-source distribution such as http://openjdk.java.net[OpenJDK]. +NOTE: Elasticsearch includes a bundled version of http://openjdk.java.net[OpenJDK] +from the JDK maintainers (GPLv2+CE).
To use your own version of Java, +see the <<jvm-version>> [[rpm-key]] ==== Import the Elasticsearch PGP Key diff --git a/docs/reference/setup/install/zip-targz-daemon.asciidoc b/docs/reference/setup/install/targz-daemon.asciidoc similarity index 100% rename from docs/reference/setup/install/zip-targz-daemon.asciidoc rename to docs/reference/setup/install/targz-daemon.asciidoc diff --git a/docs/reference/setup/install/zip-targz-start.asciidoc b/docs/reference/setup/install/targz-start.asciidoc similarity index 100% rename from docs/reference/setup/install/zip-targz-start.asciidoc rename to docs/reference/setup/install/targz-start.asciidoc diff --git a/docs/reference/setup/install/zip-targz.asciidoc b/docs/reference/setup/install/targz.asciidoc similarity index 74% rename from docs/reference/setup/install/zip-targz.asciidoc rename to docs/reference/setup/install/targz.asciidoc index d532438103754..75c9fdb42188c 100644 --- a/docs/reference/setup/install/zip-targz.asciidoc +++ b/docs/reference/setup/install/targz.asciidoc @@ -1,9 +1,7 @@ -[[zip-targz]] -=== Install Elasticsearch with `.zip` or `.tar.gz` +[[targz]] +=== Install Elasticsearch from archive on Linux or MacOS -Elasticsearch is provided as a `.zip` and as a `.tar.gz` package. These -packages can be used to install Elasticsearch on any system and are the -easiest package format to use when trying out Elasticsearch. +Elasticsearch is provided as a `.tar.gz` archive for Linux and MacOS. include::license.asciidoc[] @@ -12,12 +10,12 @@ link:/downloads/elasticsearch[Download Elasticsearch] page. Other versions can be found on the link:/downloads/past-releases[Past Releases page]. -NOTE: Elasticsearch requires Java 8 or later. Use the -http://www.oracle.com/technetwork/java/javase/downloads/index.html[official Oracle distribution] -or an open-source distribution such as http://openjdk.java.net[OpenJDK]. +NOTE: Elasticsearch includes a bundled version of http://openjdk.java.net[OpenJDK] +from the JDK maintainers (GPLv2+CE). To use your own version of Java, +see the <<jvm-version>> -[[install-zip]] -==== Download and install the `.zip` package +[[install-linux]] +==== Download and install archive for Linux ifeval::["{release-state}"=="unreleased"] endif::[] ifeval::["{release-state}"!="unreleased"] -The `.zip` archive for Elasticsearch v{version} can be downloaded and installed as follows: - +The Linux archive for Elasticsearch v{version} can be downloaded and installed as follows: ["source","sh",subs="attributes"] -------------------------------------------- -wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-windows-x86_64.zip -wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-windows-x86_64.zip.sha512 -shasum -a 512 -c elasticsearch-{version}-windows-x86_64.zip.sha512 <1> -unzip elasticsearch-{version}-windows-x86_64.zip +wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-linux-x86_64.tar.gz +wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-linux-x86_64.tar.gz.sha512 +shasum -a 512 -c elasticsearch-{version}-linux-x86_64.tar.gz.sha512 <1> +tar -xzf elasticsearch-{version}-linux-x86_64.tar.gz cd elasticsearch-{version}/ <2> -------------------------------------------- -<1> Compares the SHA of the downloaded `.zip` archive and the published checksum, which should output - `elasticsearch-{version}-windows-x86_64.zip: OK`.
+<1> Compares the SHA of the downloaded `.tar.gz` archive and the published checksum, which should output + `elasticsearch-{version}-linux-x86_64.tar.gz: OK`. <2> This directory is known as `$ES_HOME`. -Alternatively, you can download the following package, which contains only -features that are available under the Apache 2.0 license: -https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-{version}-windows-x86_64.zip +Alternatively, you can download the following package, which includes only +Apache 2.0 licensed code: +https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-{version}-linux-x86_64.tar.gz endif::[] - -[[install-targz]] -==== Download and install the `.tar.gz` package +[[install-macos]] +==== Download and install archive for MacOS ifeval::["{release-state}"=="unreleased"] @@ -60,28 +56,28 @@ endif::[] ifeval::["{release-state}"!="unreleased"] -The `.tar.gz` archive for Elasticsearch v{version} can be downloaded and installed as follows: +The MacOS archive for Elasticsearch v{version} can be downloaded and installed as follows: ["source","sh",subs="attributes"] -------------------------------------------- -wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-linux-x86_64.tar.gz -wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-linux-x86_64.tar.gz.sha512 -shasum -a 512 -c elasticsearch-{version}-linux-x86_64.tar.gz.sha512 <1> -tar -xzf elasticsearch-{version}-linux-x86_64.tar.gz +wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-darwin-x86_64.tar.gz +wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}-darwin-x86_64.tar.gz.sha512 +shasum -a 512 -c elasticsearch-{version}-darwin-x86_64.tar.gz.sha512 <1> +tar -xzf elasticsearch-{version}-darwin-x86_64.tar.gz cd elasticsearch-{version}/ <2> -------------------------------------------- <1> Compares the SHA of the downloaded `.tar.gz` archive and the published checksum, which should output - `elasticsearch-{version}-linux-x86_64.tar.gz: OK`. + `elasticsearch-{version}-darwin-x86_64.tar.gz: OK`. <2> This directory is known as `$ES_HOME`. Alternatively, you can download the following package, which includes only Apache 2.0 licensed code: -https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-{version}-linux-x86_64.tar.gz +https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-{version}-darwin-x86_64.tar.gz endif::[] ifdef::include-xpack[] -[[zip-targz-enable-indices]] +[[targz-enable-indices]] ==== Enable automatic creation of {xpack} indices {xpack} will try to automatically create a number of indices within {es}. @@ -89,8 +85,8 @@ include::xpack-indices.asciidoc[] endif::include-xpack[] -[[zip-targz-running]] -include::zip-targz-start.asciidoc[] +[[targz-running]] +include::targz-start.asciidoc[] include::check-running.asciidoc[] @@ -98,9 +94,9 @@ Log printing to `stdout` can be disabled using the `-q` or `--quiet` option on the command line. 
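As an aside on the verification step: the `shasum -a 512 -c` command shown above checks the downloaded archive against its published checksum. The same check can be performed with the JDK alone; below is a minimal sketch, assuming the archive and its `.sha512` companion file sit in the working directory (the hard-coded file names are illustrative only, not taken from the docs):

["source","java"]
--------------------------------------------------
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;

// Minimal stand-in for `shasum -a 512 -c`: hash the archive and compare the
// hex digest against the first token of the published .sha512 file.
public class VerifySha512 {
    public static void main(String[] args) throws Exception {
        Path archive = Paths.get("elasticsearch-7.0.0-linux-x86_64.tar.gz");         // illustrative name
        Path checksum = Paths.get("elasticsearch-7.0.0-linux-x86_64.tar.gz.sha512"); // "<digest>  <name>"

        byte[] hash = MessageDigest.getInstance("SHA-512").digest(Files.readAllBytes(archive));
        StringBuilder hex = new StringBuilder();
        for (byte b : hash) {
            hex.append(String.format("%02x", b));
        }

        // The .sha512 file holds the hex digest followed by the file name, so
        // only the first whitespace-separated token is compared.
        String expected = new String(Files.readAllBytes(checksum), StandardCharsets.UTF_8).trim().split("\\s+")[0];
        System.out.println(hex.toString().equals(expected) ? "OK" : "MISMATCH");
    }
}
--------------------------------------------------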
[[setup-installation-daemon]] -include::zip-targz-daemon.asciidoc[] +include::targz-daemon.asciidoc[] -[[zip-targz-configuring]] +[[targz-configuring]] ==== Configuring Elasticsearch on the command line Elasticsearch loads its configuration from the `$ES_HOME/config/elasticsearch.yml` @@ -119,10 +115,10 @@ TIP: Typically, any cluster-wide settings (like `cluster.name`) should be added to the `elasticsearch.yml` config file, while any node-specific settings such as `node.name` could be specified on the command line. -[[zip-targz-layout]] -==== Directory layout of `.zip` and `.tar.gz` archives +[[targz-layout]] +==== Directory layout of archives -The `.zip` and `.tar.gz` packages are entirely self-contained. All files and +The archive distributions are entirely self-contained. All files and directories are, by default, contained within `$ES_HOME` -- the directory created when unpacking the archive. diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index 46aadbc34a9f7..83d1251148c4a 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -17,9 +17,9 @@ link:/downloads/elasticsearch[Download Elasticsearch] page. Other versions can be found on the link:/downloads/past-releases[Past Releases page]. -NOTE: Elasticsearch requires Java 8 or later. Use the -http://www.oracle.com/technetwork/java/javase/downloads/index.html[official Oracle distribution] -or an open-source distribution such as http://openjdk.java.net[OpenJDK]. +NOTE: Elasticsearch includes a bundled version of http://openjdk.java.net[OpenJDK] +from the JDK maintainers (GPLv2+CE). To use your own version of Java, +see the <<jvm-version>> [[download-msi]] ==== Download the `.msi` package diff --git a/docs/reference/setup/install/zip-windows.asciidoc b/docs/reference/setup/install/zip-windows.asciidoc index 967b449bc972b..efed4b613c54b 100644 --- a/docs/reference/setup/install/zip-windows.asciidoc +++ b/docs/reference/setup/install/zip-windows.asciidoc @@ -1,7 +1,7 @@ [[zip-windows]] === Install Elasticsearch with `.zip` on Windows -Elasticsearch can be installed on Windows using the `.zip` package. This +Elasticsearch can be installed on Windows using the Windows `.zip` archive. This comes with a `elasticsearch-service.bat` command which will setup Elasticsearch to run as a service. @@ -16,9 +16,9 @@ link:/downloads/elasticsearch[Download Elasticsearch] page. Other versions can be found on the link:/downloads/past-releases[Past Releases page]. -NOTE: Elasticsearch requires Java 8 or later. Use the -http://www.oracle.com/technetwork/java/javase/downloads/index.html[official Oracle distribution] -or an open-source distribution such as http://openjdk.java.net[OpenJDK]. +NOTE: Elasticsearch includes a bundled version of http://openjdk.java.net[OpenJDK] +from the JDK maintainers (GPLv2+CE). To use your own version of Java, +see the <<jvm-version>> [[install-windows]] ==== Download and install the `.zip` package diff --git a/docs/reference/setup/starting.asciidoc b/docs/reference/setup/starting.asciidoc index 6fab871e7c9ca..4474c247361db 100644 --- a/docs/reference/setup/starting.asciidoc +++ b/docs/reference/setup/starting.asciidoc @@ -11,10 +11,10 @@ If you installed {es} with a `.tar.gz` package, you can start {es} from the command line.
[float] -include::install/zip-targz-start.asciidoc[] +include::install/targz-start.asciidoc[] [float] -include::install/zip-targz-daemon.asciidoc[] +include::install/targz-daemon.asciidoc[] [float] [[start-zip]] @@ -69,4 +69,4 @@ include::install/init-systemd.asciidoc[] include::install/rpm-init.asciidoc[] [float] -include::install/systemd.asciidoc[] \ No newline at end of file +include::install/systemd.asciidoc[] From f8cbb74d5f4e37a62be24a1f4bc882907811a4fe Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 29 Mar 2019 10:18:59 -0400 Subject: [PATCH 29/63] Fix archives links in plugins docs This commit fixes some broken links to the archive layouts in the plugins docs. --- docs/plugins/plugin-script.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 0612d3992af17..9ab213bfb3e2c 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -5,7 +5,8 @@ The `plugin` script is used to install, list, and remove plugins. It is located in the `$ES_HOME/bin` directory by default but it may be in a different location depending on which Elasticsearch package you installed: -* {ref}/zip-targz.html#zip-targz-layout[Directory layout of `.zip` and `.tar.gz` archives] +* {ref}/targz.html#targz-layout[Directory layout of `.tar.gz` archives] +* {ref}/zip-windows.html#windows-layout[Directory layout of Windows `.zip` archives] * {ref}/deb.html#deb-layout[Directory layout of Debian package] * {ref}/rpm.html#rpm-layout[Directory layout of RPM] From 56d9a74edc7e80bbb85ce997fff61cfbf8399edd Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 29 Mar 2019 10:44:08 -0400 Subject: [PATCH 30/63] Fix more broken links in plugins docs The archive docs were moved around a bit, and this commit fixes another broken link in the plugins docs. --- docs/plugins/plugin-script.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 9ab213bfb3e2c..950a0f86e4149 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -225,7 +225,8 @@ bin\elasticsearch-plugin install analysis-icu The default location of the `plugins` directory depends on which package you install: -* {ref}/zip-targz.html#zip-targz-layout[Directory layout of `.zip` and `.tar.gz` archives] +* {ref}/targz.html#targz-layout[Directory layout of `.tar.gz` archives] +* {ref}/zip-windows.html#windows-layout[Directory layout of Windows `.zip` archives] * {ref}/deb.html#deb-layout[Directory layout of Debian package] * {ref}/rpm.html#rpm-layout[Directory layout of RPM] From c8205494cc4e0c61529ce8917ec3a96ae3fc1b41 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 29 Mar 2019 16:46:55 +0200 Subject: [PATCH 31/63] Fix repository-hdfs when no docker and unnecessary fixture The hdfs-fixture is actually executed in plugin/repository-hdfs as a dependency. The fixture is not needed and actually causes a failure because we have two copies now and both use the same ports.
--- plugins/repository-hdfs/build.gradle | 1 + test/fixtures/hdfs-fixture/build.gradle | 15 +-------------- test/fixtures/hdfs-fixture/docker-compose.yml | 12 ------------ 3 files changed, 2 insertions(+), 26 deletions(-) delete mode 100644 test/fixtures/hdfs-fixture/docker-compose.yml diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 946b377491d26..d5addc663ca5b 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -79,6 +79,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture executable = new File(project.runtimeJavaHome, 'bin/java') env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }" + onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled } waitCondition = { fixture, ant -> // the hdfs.MiniHDFS fixture writes the ports file when // it's ready, so we can just wait for the file to exist diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index f2aebda46b875..bd2dbec6649ab 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -18,23 +18,10 @@ */ apply plugin: 'elasticsearch.build' -apply plugin: 'elasticsearch.test.fixtures' dependencies { compile "org.apache.hadoop:hadoop-minicluster:2.8.1" } -task syncClasses(type: Sync) { - from sourceSets.test.runtimeClasspath - into "${buildDir}/fixture" -} - -preProcessFixture { - dependsOn syncClasses - - doLast { - file("${buildDir}/shared").mkdirs() - } -} - unitTest.enabled = false +thirdPartyAudit.enabled = false diff --git a/test/fixtures/hdfs-fixture/docker-compose.yml b/test/fixtures/hdfs-fixture/docker-compose.yml deleted file mode 100644 index ee86bf90fecee..0000000000000 --- a/test/fixtures/hdfs-fixture/docker-compose.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: '3' -services: - hdfs: - hostname: hdfs.build.elastic.co - image: ubuntu:14.04 - volumes: - - ./build/fixture:/fixture - ports: - # FIXME: Don't fix the host ports - - "9999:9999" - - "9998:9999" - entrypoint: apt-get update && apt-get install net-tools && java -cp "/fixture:/fixture/*" hdfs.MiniHDFS /data From 3fcfd603253810b86e818797409ee56e085109d4 Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Fri, 29 Mar 2019 17:30:08 +0100 Subject: [PATCH 32/63] Unmute and fix testSubParserArray (#40626) testSubParserArray failed; it is now fixed, and the test has been improved so that the outer-level element inside the array is not always an object.
Closes #40617 --- .../elasticsearch/common/xcontent/XContentParserTests.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java index 0fe8a2b9f91fb..606d019f3c4f7 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.xcontent; import com.fasterxml.jackson.core.JsonParseException; - import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -369,7 +368,6 @@ public void testSubParserObject() throws IOException { } } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/40617") public void testSubParserArray() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); int numberOfArrayElements = randomInt(10); @@ -378,7 +376,7 @@ public void testSubParserArray() throws IOException { builder.startArray(); int numberOfTokens = 0; for (int i = 0; i < numberOfArrayElements; ++i) { - numberOfTokens += generateRandomObjectForMarking(builder); + numberOfTokens += generateRandomObject(builder, 0); } builder.endArray(); builder.endObject(); @@ -392,7 +390,7 @@ public void testSubParserArray() throws IOException { assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); // [ XContentParser subParser = new XContentSubParser(parser); try { - int tokensToSkip = randomInt(numberOfTokens - 1); + int tokensToSkip = randomInt(numberOfTokens); for (int i = 0; i < tokensToSkip; i++) { // Simulate incomplete parsing assertNotNull(subParser.nextToken()); From 2e2c08b011cb419c86acc9778ccd57771c228bbb Mon Sep 17 00:00:00 2001 From: David Roberts Date: Fri, 29 Mar 2019 16:40:25 +0000 Subject: [PATCH 33/63] [DOCS] Use "source" instead of "inline" in ML docs (#40635) Specifying an inline script in an "inline" field was deprecated in 5.x. The new field name is "source". (Since 6.x still accepts "inline" I will only backport this docs change as far as 7.0.) 
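For orientation, the rename below is purely a change of JSON key: a script body that used to be sent as `"inline": "..."` is now sent as `"source": "..."`. A minimal sketch of the same idea from Java, assuming the `org.elasticsearch.script.Script` helper from the server codebase (constructor shape as found in 6.x/7.x; treat this usage as an illustration rather than part of the commit):

["source","java"]
--------------------------------------------------
import java.util.Collections;

import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

// Sketch: an inline Painless script like those in the ML docs below. When a
// script of this shape is rendered to JSON, the body travels under the
// "source" key that this commit switches the docs to, not the old "inline".
public class SourceFieldExample {
    public static void main(String[] args) {
        Script script = new Script(
            ScriptType.INLINE,                       // inline body, as opposed to a stored script id
            "painless",                              // the "lang" field
            "doc['some_field'].value.toLowerCase()", // the body, carried under "source"
            Collections.emptyMap()                   // the "params" field
        );
        System.out.println(script.getType() + " / " + script.getLang() + " / " + script.getIdOrCode());
    }
}
--------------------------------------------------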
--- docs/reference/ml/transforms.asciidoc | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/reference/ml/transforms.asciidoc b/docs/reference/ml/transforms.asciidoc index a3e7df9fdf27a..e49b1ff32d4f2 100644 --- a/docs/reference/ml/transforms.asciidoc +++ b/docs/reference/ml/transforms.asciidoc @@ -134,7 +134,7 @@ PUT _ml/datafeeds/datafeed-test1 "total_error_count": { <2> "script": { "lang": "expression", - "inline": "doc['error_count'].value + doc['aborted_count'].value" + "source": "doc['error_count'].value + doc['aborted_count'].value" } } } @@ -239,7 +239,7 @@ PUT _ml/datafeeds/datafeed-test2 "my_script_field": { "script": { "lang": "painless", - "inline": "doc['some_field'].value + '_' + doc['another_field'].value" <2> + "source": "doc['some_field'].value + '_' + doc['another_field'].value" <2> } } } @@ -276,7 +276,7 @@ POST _ml/datafeeds/datafeed-test2/_update "my_script_field": { "script": { "lang": "painless", - "inline": "doc['another_field'].value.trim()" <1> + "source": "doc['another_field'].value.trim()" <1> } } } @@ -312,7 +312,7 @@ POST _ml/datafeeds/datafeed-test2/_update "my_script_field": { "script": { "lang": "painless", - "inline": "doc['some_field'].value.toLowerCase()" <1> + "source": "doc['some_field'].value.toLowerCase()" <1> } } } @@ -349,7 +349,7 @@ POST _ml/datafeeds/datafeed-test2/_update "my_script_field": { "script": { "lang": "painless", - "inline": "doc['some_field'].value.substring(0, 1).toUpperCase() + doc['some_field'].value.substring(1).toLowerCase()" <1> + "source": "doc['some_field'].value.substring(0, 1).toUpperCase() + doc['some_field'].value.substring(1).toLowerCase()" <1> } } } @@ -386,7 +386,7 @@ POST _ml/datafeeds/datafeed-test2/_update "my_script_field": { "script": { "lang": "painless", - "inline": "/\\s/.matcher(doc['tokenstring2'].value).replaceAll('_')" <1> + "source": "/\\s/.matcher(doc['tokenstring2'].value).replaceAll('_')" <1> } } } @@ -422,7 +422,7 @@ POST _ml/datafeeds/datafeed-test2/_update "my_script_field": { "script": { "lang": "painless", - "inline": "def m = /(.*)-bar-([0-9][0-9])/.matcher(doc['tokenstring3'].value); return m.find() ? m.group(1) + '_' + m.group(2) : '';" <1> + "source": "def m = /(.*)-bar-([0-9][0-9])/.matcher(doc['tokenstring3'].value); return m.find() ? m.group(1) + '_' + m.group(2) : '';" <1> } } } @@ -554,7 +554,7 @@ PUT _ml/datafeeds/datafeed-test4 "script_fields": { "my_coordinates": { "script": { - "inline": "doc['coords.lat'].value + ',' + doc['coords.lon'].value", + "source": "doc['coords.lat'].value + ',' + doc['coords.lon'].value", "lang": "painless" } } From 5901b4286ec4a91018c8e6a538a5e73216b734a3 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 29 Mar 2019 11:29:54 -0700 Subject: [PATCH 34/63] Update vector similarity examples to avoid negative scores. (#40493) Negative scores are no longer allowed, but the cosine similarity between two vectors lies in the range [-1, 1], and dot products can also be negative. This commit updates the documentation with an example of how to avoid negative scores. 
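Some context for the two transforms used in the examples that follow: cosine similarity lies in `[-1, 1]`, so adding `1.0` shifts it into `[0, 2]`. For dot products, assuming the `script_score` definition `sigmoid(value, k, a) = value^a / (k^a + value^a)`, the call `sigmoid(1, Math.E, -value)` reduces to the logistic function `1 / (1 + e^(-value))`, which stays within `(0, 1)` for any input. A quick plain-Java sanity check (a stand-in for the Painless built-ins, not the built-ins themselves):

["source","java"]
--------------------------------------------------
// Sanity check: both score transforms used in the updated examples are non-negative.
public class NonNegativeScores {

    // Stand-in for sigmoid(1, Math.E, -value): with value^a / (k^a + value^a),
    // value = 1, k = Math.E and a = -x, this is 1 / (e^(-x) + 1), the logistic function.
    static double logistic(double x) {
        return 1.0 / (1.0 + Math.exp(-x));
    }

    public static void main(String[] args) {
        double[] dotProducts = {-42.0, -1.3, 0.0, 0.7, 156.0};
        for (double v : dotProducts) {
            System.out.printf("dot = %8.2f  ->  score = %.6f%n", v, logistic(v)); // always in (0, 1)
        }
        double cosine = -0.95;                                  // worst case is -1.0
        System.out.println("cosine + 1.0 = " + (cosine + 1.0)); // always in [0, 2]
    }
}
--------------------------------------------------

Running it shows every transformed score is non-negative, even for strongly negative dot products and for cosine similarity near its minimum of -1.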
--- .../query-dsl/script-score-query.asciidoc | 33 +++++++++++++------ 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/docs/reference/query-dsl/script-score-query.asciidoc b/docs/reference/query-dsl/script-score-query.asciidoc index 56c4f7c41b8ee..f3f0bc8af48c9 100644 --- a/docs/reference/query-dsl/script-score-query.asciidoc +++ b/docs/reference/query-dsl/script-score-query.asciidoc @@ -36,6 +36,10 @@ GET /_search // CONSOLE // TEST[setup:twitter] +NOTE: The values returned from `script_score` cannot be negative. In general, +Lucene requires the scores produced by queries to be non-negative in order to +support certain search optimizations. + ==== Accessing the score of a document within a script Within a script, you can @@ -92,9 +96,9 @@ cosine similarity between a given query vector and document vectors. "match_all": {} }, "script": { - "source": "cosineSimilarity(params.queryVector, doc['my_dense_vector'])", + "source": "cosineSimilarity(params.query_vector, doc['my_dense_vector']) + 1.0", <1> "params": { - "queryVector": [4, 3.4, -0.2] <1> + "query_vector": [4, 3.4, -0.2] <2> } } } @@ -102,7 +106,8 @@ cosine similarity between a given query vector and document vectors. } -------------------------------------------------- // NOTCONSOLE -<1> To take advantage of the script optimizations, provide a query vector as a script parameter. +<1> The script adds 1.0 to the cosine similarity to prevent the score from being negative. +<2> To take advantage of the script optimizations, provide a query vector as a script parameter. Similarly, for sparse_vector fields, `cosineSimilaritySparse` calculates cosine similarity between a given query vector and document vectors. @@ -116,9 +121,9 @@ between a given query vector and document vectors. "match_all": {} }, "script": { - "source": "cosineSimilaritySparse(params.queryVector, doc['my_sparse_vector'])", + "source": "cosineSimilaritySparse(params.query_vector, doc['my_sparse_vector']) + 1.0", "params": { - "queryVector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0} + "query_vector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0} } } } @@ -139,9 +144,12 @@ dot product between a given query vector and document vectors. "match_all": {} }, "script": { - "source": "dotProduct(params.queryVector, doc['my_dense_vector'])", + "source": """ + double value = dotProduct(params.query_vector, doc['my_dense_vector']); + return sigmoid(1, Math.E, -value); <1> + """, "params": { - "queryVector": [4, 3.4, -0.2] + "query_vector": [4, 3.4, -0.2] } } } @@ -150,6 +158,8 @@ dot product between a given query vector and document vectors. -------------------------------------------------- // NOTCONSOLE +<1> Using the standard sigmoid function prevents scores from being negative. + Similarly, for sparse_vector fields, `dotProductSparse` calculates dot product between a given query vector and document vectors. @@ -162,9 +172,12 @@ between a given query vector and document vectors.
"match_all": {} }, "script": { - "source": "dotProductSparse(params.queryVector, doc['my_sparse_vector'])", - "params": { - "queryVector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0} + "source": """ + double value = dotProductSparse(params.query_vector, doc['my_sparse_vector']); + return sigmoid(1, Math.E, -value); + """, + "params": { + "query_vector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0} } } } From 1e73d14fca69efb8cb9d2007c8e8f623a81a3b06 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 29 Mar 2019 13:24:08 -0700 Subject: [PATCH 35/63] disable kerberos test until kerberos fixture is working again --- x-pack/qa/kerberos-tests/build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 50b709f77dca5..88248f89b72c5 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -8,6 +8,9 @@ apply plugin: 'elasticsearch.test.fixtures' testFixtures.useFixture ":test:fixtures:krb5kdc-fixture" +// https://github.com/elastic/elasticsearch/issues/40624 +integTest.enabled = false + dependencies { testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') From 237a2c4d47faed1bbee7c30be4228ada52789eb1 Mon Sep 17 00:00:00 2001 From: Omkar Deshpande Date: Sat, 30 Mar 2019 02:49:26 +0530 Subject: [PATCH 36/63] Update version numbers in Elasticsearch-Py docs (#40355) --- docs/python/index.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/python/index.asciidoc b/docs/python/index.asciidoc index ea1b6a837b58f..8def003e7c2ab 100644 --- a/docs/python/index.asciidoc +++ b/docs/python/index.asciidoc @@ -58,8 +58,8 @@ The recommended way to set your requirements in your `setup.py` or [source,txt] ------------------------------------ - # Elasticsearch 5.x - elasticsearch>=5.0.0,<6.0.0 + # Elasticsearch 6.x + elasticsearch>=6.0.0,<7.0.0 # Elasticsearch 2.x elasticsearch2 From 0ad3d90a36529bf369813ea6253f305e11aff2e9 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 29 Mar 2019 14:20:53 -0700 Subject: [PATCH 37/63] Add notice for bundled jdk (#40576) * Add notice for bundled jdk This commit adds the license/notice for the bundled openjdk. 
* First draft * iteration * Fix package notices * Iteration * One more iteration --- distribution/archives/build.gradle | 2 +- distribution/build.gradle | 30 +- distribution/licenses/openjdk-LICENSE.txt | 347 ++++++++++++++++++++++ distribution/licenses/openjdk-NOTICE.txt | 5 + distribution/packages/build.gradle | 4 +- 5 files changed, 379 insertions(+), 9 deletions(-) create mode 100644 distribution/licenses/openjdk-LICENSE.txt create mode 100644 distribution/licenses/openjdk-NOTICE.txt diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index f6b27b0a2dd26..b48ebeb2c6ac0 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -84,7 +84,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla rename { 'LICENSE.txt' } } - with noticeFile + with noticeFile(oss, jdk) into('modules') { with modulesFiles } diff --git a/distribution/build.gradle b/distribution/build.gradle index 99a3e15986905..e27d37bb513a0 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -52,11 +52,17 @@ task buildServerNotice(type: NoticeTask) { // other distributions include notices from modules as well, which are added below later task buildDefaultNotice(type: NoticeTask) { licensesDir new File(project(':server').projectDir, 'licenses') + licensesDir new File(project(':distribution').projectDir, 'licenses') } - -// other distributions include notices from modules as well, which are added below later task buildOssNotice(type: NoticeTask) { licensesDir new File(project(':server').projectDir, 'licenses') + licensesDir new File(project(':distribution').projectDir, 'licenses') +} +task buildDefaultNoJdkNotice(type: NoticeTask) { + licensesDir new File(project(':server').projectDir, 'licenses') +} +task buildOssNoJdkNotice(type: NoticeTask) { + licensesDir new File(project(':server').projectDir, 'licenses') } /***************************************************************************** @@ -377,11 +383,21 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } - noticeFile = copySpec { - if (project.name == 'integ-test-zip') { - from buildServerNotice - } else { - from buildDefaultNotice + noticeFile = { oss, jdk -> + copySpec { + if (project.name == 'integ-test-zip') { + from buildServerNotice + } else { + if (oss && jdk) { + from buildOssNotice + } else if (oss) { + from buildOssNoJdkNotice + } else if (jdk) { + from buildDefaultNotice + } else { + from buildDefaultNoJdkNotice + } + } } } diff --git a/distribution/licenses/openjdk-LICENSE.txt b/distribution/licenses/openjdk-LICENSE.txt new file mode 100644 index 0000000000000..b40a0f457d75c --- /dev/null +++ b/distribution/licenses/openjdk-LICENSE.txt @@ -0,0 +1,347 @@ +The GNU General Public License (GPL) + +Version 2, June 1991 + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. +59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +Everyone is permitted to copy and distribute verbatim copies of this license +document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to share +and change it. By contrast, the GNU General Public License is intended to +guarantee your freedom to share and change free software--to make sure the +software is free for all its users. This General Public License applies to +most of the Free Software Foundation's software and to any other program whose +authors commit to using it. 
(Some other Free Software Foundation software is +covered by the GNU Library General Public License instead.) You can apply it to +your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our +General Public Licenses are designed to make sure that you have the freedom to +distribute copies of free software (and charge for this service if you wish), +that you receive source code or can get it if you want it, that you can change +the software or use pieces of it in new free programs; and that you know you +can do these things. + +To protect your rights, we need to make restrictions that forbid anyone to deny +you these rights or to ask you to surrender the rights. These restrictions +translate to certain responsibilities for you if you distribute copies of the +software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis or for +a fee, you must give the recipients all the rights that you have. You must +make sure that they, too, receive or can get the source code. And you must +show them these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and (2) +offer you this license which gives you legal permission to copy, distribute +and/or modify the software. + +Also, for each author's protection and ours, we want to make certain that +everyone understands that there is no warranty for this free software. If the +software is modified by someone else and passed on, we want its recipients to +know that what they have is not the original, so that any problems introduced +by others will not reflect on the original authors' reputations. + +Finally, any free program is threatened constantly by software patents. We +wish to avoid the danger that redistributors of a free program will +individually obtain patent licenses, in effect making the program proprietary. +To prevent this, we have made it clear that any patent must be licensed for +everyone's free use or not licensed at all. + +The precise terms and conditions for copying, distribution and modification +follow. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. This License applies to any program or other work which contains a notice +placed by the copyright holder saying it may be distributed under the terms of +this General Public License. The "Program", below, refers to any such program +or work, and a "work based on the Program" means either the Program or any +derivative work under copyright law: that is to say, a work containing the +Program or a portion of it, either verbatim or with modifications and/or +translated into another language. (Hereinafter, translation is included +without limitation in the term "modification".) Each licensee is addressed as +"you". + +Activities other than copying, distribution and modification are not covered by +this License; they are outside its scope. The act of running the Program is +not restricted, and the output from the Program is covered only if its contents +constitute a work based on the Program (independent of having been made by +running the Program). Whether that is true depends on what the Program does. + +1. 
You may copy and distribute verbatim copies of the Program's source code as +you receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice and +disclaimer of warranty; keep intact all the notices that refer to this License +and to the absence of any warranty; and give any other recipients of the +Program a copy of this License along with the Program. + +You may charge a fee for the physical act of transferring a copy, and you may +at your option offer warranty protection in exchange for a fee. + +2. You may modify your copy or copies of the Program or any portion of it, thus +forming a work based on the Program, and copy and distribute such modifications +or work under the terms of Section 1 above, provided that you also meet all of +these conditions: + + a) You must cause the modified files to carry prominent notices stating + that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in whole or + in part contains or is derived from the Program or any part thereof, to be + licensed as a whole at no charge to all third parties under the terms of + this License. + + c) If the modified program normally reads commands interactively when run, + you must cause it, when started running for such interactive use in the + most ordinary way, to print or display an announcement including an + appropriate copyright notice and a notice that there is no warranty (or + else, saying that you provide a warranty) and that users may redistribute + the program under these conditions, and telling the user how to view a copy + of this License. (Exception: if the Program itself is interactive but does + not normally print such an announcement, your work based on the Program is + not required to print an announcement.) + +These requirements apply to the modified work as a whole. If identifiable +sections of that work are not derived from the Program, and can be reasonably +considered independent and separate works in themselves, then this License, and +its terms, do not apply to those sections when you distribute them as separate +works. But when you distribute the same sections as part of a whole which is a +work based on the Program, the distribution of the whole must be on the terms +of this License, whose permissions for other licensees extend to the entire +whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest your +rights to work written entirely by you; rather, the intent is to exercise the +right to control the distribution of derivative or collective works based on +the Program. + +In addition, mere aggregation of another work not based on the Program with the +Program (or with a work based on the Program) on a volume of a storage or +distribution medium does not bring the other work under the scope of this +License. + +3. 
You may copy and distribute the Program (or a work based on it, under +Section 2) in object code or executable form under the terms of Sections 1 and +2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable source + code, which must be distributed under the terms of Sections 1 and 2 above + on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three years, to + give any third party, for a charge no more than your cost of physically + performing source distribution, a complete machine-readable copy of the + corresponding source code, to be distributed under the terms of Sections 1 + and 2 above on a medium customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer to + distribute corresponding source code. (This alternative is allowed only + for noncommercial distribution and only if you received the program in + object code or executable form with such an offer, in accord with + Subsection b above.) + +The source code for a work means the preferred form of the work for making +modifications to it. For an executable work, complete source code means all +the source code for all modules it contains, plus any associated interface +definition files, plus the scripts used to control compilation and installation +of the executable. However, as a special exception, the source code +distributed need not include anything that is normally distributed (in either +source or binary form) with the major components (compiler, kernel, and so on) +of the operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the source +code from the same place counts as distribution of the source code, even though +third parties are not compelled to copy the source along with the object code. + +4. You may not copy, modify, sublicense, or distribute the Program except as +expressly provided under this License. Any attempt otherwise to copy, modify, +sublicense or distribute the Program is void, and will automatically terminate +your rights under this License. However, parties who have received copies, or +rights, from you under this License will not have their licenses terminated so +long as such parties remain in full compliance. + +5. You are not required to accept this License, since you have not signed it. +However, nothing else grants you permission to modify or distribute the Program +or its derivative works. These actions are prohibited by law if you do not +accept this License. Therefore, by modifying or distributing the Program (or +any work based on the Program), you indicate your acceptance of this License to +do so, and all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + +6. Each time you redistribute the Program (or any work based on the Program), +the recipient automatically receives a license from the original licensor to +copy, distribute or modify the Program subject to these terms and conditions. +You may not impose any further restrictions on the recipients' exercise of the +rights granted herein. You are not responsible for enforcing compliance by +third parties to this License. + +7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), conditions +are imposed on you (whether by court order, agreement or otherwise) that +contradict the conditions of this License, they do not excuse you from the +conditions of this License. If you cannot distribute so as to satisfy +simultaneously your obligations under this License and any other pertinent +obligations, then as a consequence you may not distribute the Program at all. +For example, if a patent license would not permit royalty-free redistribution +of the Program by all those who receive copies directly or indirectly through +you, then the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply and +the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any patents or +other property right claims or to contest validity of any such claims; this +section has the sole purpose of protecting the integrity of the free software +distribution system, which is implemented by public license practices. Many +people have made generous contributions to the wide range of software +distributed through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing to +distribute software through any other system and a licensee cannot impose that +choice. + +This section is intended to make thoroughly clear what is believed to be a +consequence of the rest of this License. + +8. If the distribution and/or use of the Program is restricted in certain +countries either by patents or by copyrighted interfaces, the original +copyright holder who places the Program under this License may add an explicit +geographical distribution limitation excluding those countries, so that +distribution is permitted only in or among countries not thus excluded. In +such case, this License incorporates the limitation as if written in the body +of this License. + +9. The Free Software Foundation may publish revised and/or new versions of the +General Public License from time to time. Such new versions will be similar in +spirit to the present version, but may differ in detail to address new problems +or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any later +version", you have the option of following the terms and conditions either of +that version or of any later version published by the Free Software Foundation. +If the Program does not specify a version number of this License, you may +choose any version ever published by the Free Software Foundation. + +10. If you wish to incorporate parts of the Program into other free programs +whose distribution conditions are different, write to the author to ask for +permission. For software which is copyrighted by the Free Software Foundation, +write to the Free Software Foundation; we sometimes make exceptions for this. +Our decision will be guided by the two goals of preserving the free status of +all derivatives of our free software and of promoting the sharing and reuse of +software generally. + +NO WARRANTY + +11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR +THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE +STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE +PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND +PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, +YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL +ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE +PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR +INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA +BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER +OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible +use to the public, the best way to achieve this is to make it free software +which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach +them to the start of each source file to most effectively convey the exclusion +of warranty; and each file should have at least the "copyright" line and a +pointer to where the full notice is found. + + One line to give the program's name and a brief idea of what it does. + + Copyright (C) + + This program is free software; you can redistribute it and/or modify it + under the terms of the GNU General Public License as published by the Free + Software Foundation; either version 2 of the License, or (at your option) + any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT + ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., 59 + Temple Place, Suite 330, Boston, MA 02111-1307 USA + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this when it +starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author Gnomovision comes + with ABSOLUTELY NO WARRANTY; for details type 'show w'. This is free + software, and you are welcome to redistribute it under certain conditions; + type 'show c' for details. + +The hypothetical commands 'show w' and 'show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may be +called something other than 'show w' and 'show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your school, +if any, to sign a "copyright disclaimer" for the program, if necessary. 
Here +is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + 'Gnomovision' (which makes passes at compilers) written by James Hacker. + + signature of Ty Coon, 1 April 1989 + + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Library General Public +License instead of this License. + + +"CLASSPATH" EXCEPTION TO THE GPL + +Certain source files distributed by Oracle America and/or its affiliates are +subject to the following clarification and special exception to the GPL, but +only where Oracle has expressly included in the particular source file's header +the words "Oracle designates this particular file as subject to the "Classpath" +exception as provided by Oracle in the LICENSE file that accompanied this code." + + Linking this library statically or dynamically with other modules is making + a combined work based on this library. Thus, the terms and conditions of + the GNU General Public License cover the whole combination. + + As a special exception, the copyright holders of this library give you + permission to link this library with independent modules to produce an + executable, regardless of the license terms of these independent modules, + and to copy and distribute the resulting executable under terms of your + choice, provided that you also meet, for each linked independent module, + the terms and conditions of the license of that module. An independent + module is a module which is not derived from or based on this library. If + you modify this library, you may extend this exception to your version of + the library, but you are not obligated to do so. If you do not wish to do + so, delete this exception statement from your version. diff --git a/distribution/licenses/openjdk-NOTICE.txt b/distribution/licenses/openjdk-NOTICE.txt new file mode 100644 index 0000000000000..ca5ac831c8526 --- /dev/null +++ b/distribution/licenses/openjdk-NOTICE.txt @@ -0,0 +1,5 @@ +Copyright (c) 1995, 2013, Oracle and/or its affiliates. + +OpenJDK is licensed under the GPLv2+CE. A copy of that license is included in +this distribution immediately below this notice. You can find a copy of the +OpenJDK source through the downloads page at https://elastic.co. diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 7fd8f03338cdf..1c7fc466ba845 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -260,6 +260,9 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) { // the oss package conflicts with the default distribution and vice versa conflicts('elasticsearch' + (oss ? '' : '-oss')) + + into '/usr/share/elasticsearch' + with noticeFile(oss, jdk) } } @@ -294,7 +297,6 @@ ospackage { permissionGroup 'root' into '/usr/share/elasticsearch' - with noticeFile } Closure commonDebConfig(boolean oss, boolean jdk) { From c9d05adeddde9ae09e578c858f183bd8b4e20946 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 29 Mar 2019 23:38:37 +0100 Subject: [PATCH 38/63] Clarify using time_zone and date math in range query (#40655) Currently, the docs correctly state that using `now` in range queries will not be affected by the `time_zone` parameter. However, using date math roundings like e.g. 
`now/d` will be affected by the `time_zone`. Adding this example because it
seems to be a frequently asked question and source of confusion.

Relates to #40581
---
 docs/reference/query-dsl/range-query.asciidoc | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/docs/reference/query-dsl/range-query.asciidoc b/docs/reference/query-dsl/range-query.asciidoc
index c5087d52f905e..61c46996949f1 100644
--- a/docs/reference/query-dsl/range-query.asciidoc
+++ b/docs/reference/query-dsl/range-query.asciidoc
@@ -138,7 +138,10 @@ GET _search
--------------------------------------------------
// CONSOLE
<1> This date will be converted to `2014-12-31T23:00:00 UTC`.
-<2> `now` is not affected by the `time_zone` parameter (dates must be stored as UTC).
+<2> `now` is not affected by the `time_zone` parameter, it's always the current system time (in UTC).
+However, when using <> (e.g. down to the nearest day using `now/d`),
+the provided `time_zone` will be considered.
+
 [[querying-range-fields]]
 ==== Querying range fields

From e24fd1b076fe56c5c041de34e5075b59c56d19d9 Mon Sep 17 00:00:00 2001
From: Henning Andersen <33268011+henningandersen@users.noreply.github.com>
Date: Sat, 30 Mar 2019 00:43:15 +0100
Subject: [PATCH 39/63] Reindex conflicts clarification (docs) (#40442)

Made it clearer that `conflicts: proceed` only affects version conflicts.
---
 docs/reference/docs/reindex.asciidoc | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc
index e76b6ed93cbd7..b085e081b4dd7 100644
--- a/docs/reference/docs/reindex.asciidoc
+++ b/docs/reference/docs/reindex.asciidoc
@@ -118,8 +118,11 @@ POST _reindex
// CONSOLE
// TEST[setup:twitter]

-By default, version conflicts abort the `_reindex` process, but you can just
-count them by setting `"conflicts": "proceed"` in the request body:
+By default, version conflicts abort the `_reindex` process. The `"conflicts"` request body
+parameter can be used to instruct `_reindex` to proceed with the next document on version conflicts.
+It is important to note that the handling of other error types is unaffected by the `"conflicts"` parameter.
+When `"conflicts": "proceed"` is set in the request body, the `_reindex` process will continue on version conflicts
+and return a count of version conflicts encountered:

 [source,js]
 --------------------------------------------------

From 3dd0384d683940871f5d061f153b70c0420150fa Mon Sep 17 00:00:00 2001
From: Marios Trivyzas
Date: Sat, 30 Mar 2019 12:05:15 +0100
Subject: [PATCH 40/63] SQL: [Docs] Fix doc errors regarding CURRENT_DATE. (#40649)

Some parts wrongly referred to CURRENT_TIMESTAMP.
---
 .../reference/sql/functions/date-time.asciidoc |  8 +++++++-
 .../qa/src/main/resources/docs/docs.csv-spec   | 18 ++++++++++++++----
 2 files changed, 21 insertions(+), 5 deletions(-)

diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc
index d4beaa34bd774..f3060d81b3ffc 100644
--- a/docs/reference/sql/functions/date-time.asciidoc
+++ b/docs/reference/sql/functions/date-time.asciidoc
@@ -99,6 +99,7 @@ Functions that target date/time.
-------------------------------------------------- CURRENT_DATE CURRENT_DATE() +CURDATE() -------------------------------------------------- *Input*: _none_ @@ -117,7 +118,12 @@ This method always returns the same value for its every occurrence within the sa ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/docs/docs.csv-spec[curDate] +include-tagged::{sql-specs}/docs/docs.csv-spec[currentDate] +-------------------------------------------------- + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/docs.csv-spec[currentDateFunction] -------------------------------------------------- ["source","sql",subs="attributes,callouts,macros"] diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 820c358ab2f62..9e8bad1c88f11 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -2359,18 +2359,28 @@ SELECT WEEK(CAST('1988-01-05T09:22:10Z' AS TIMESTAMP)) AS week, ISOWEEK(CAST('19 currentDate-Ignore -// tag::curDate -SELECT CURRENT_TIMESTAMP AS result; +// tag::currentDate +SELECT CURRENT_DATE AS result; result ------------------------ 2018-12-12 -// end::curDate +// end::currentDate ; currentDateFunction-Ignore +// tag::currentDateFunction +SELECT CURRENT_DATE() AS result; + + result +------------------------ +2018-12-12 +// end::currentDateFunction +; + +curDateFunction-Ignore // tag::curDateFunction -SELECT CURRENT_TIMESTAMP() AS result; +SELECT CURRENT_DATE() AS result; result ------------------------ From 35d731754455ec91e20190a874592a740c66aa44 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Sat, 30 Mar 2019 12:41:58 +0100 Subject: [PATCH 41/63] SQL: [Docs] Fix example for CURDATE Follows: 3dd0384d683940871f5d061f153b70c0420150fa --- x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 9e8bad1c88f11..93a693a69b476 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -2380,7 +2380,7 @@ SELECT CURRENT_DATE() AS result; curDateFunction-Ignore // tag::curDateFunction -SELECT CURRENT_DATE() AS result; +SELECT CURDATE() AS result; result ------------------------ From e34cc2f38b1477e78788ee377938f42cc47187c7 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Sat, 30 Mar 2019 12:51:13 +0100 Subject: [PATCH 42/63] SQL: Fix precedence of `::` psql like CAST operator (#40665) Previously, an expression like `10 + 2::long` would be interpreted as `CAST(10 + 2 AS LONG)` instead of `10 + CAST(2 AS LONG)`. 
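To make the precedence change concrete, here is a minimal sketch in the style of
the csv-spec examples above; the query and its result are hypothetical
illustrations, not part of the patch. With the corrected precedence, `::` binds
to the nearest primary expression, so only the literal `2` is cast:

SELECT 10.5 + 2::long AS result;

 result
------------------------
12.5

Under the old precedence the same text parsed as `CAST(10.5 + 2 AS LONG)`,
which would have truncated the sum to `12`.
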
--- x-pack/plugin/sql/src/main/antlr/SqlBase.g4 | 2 +- .../xpack/sql/parser/ExpressionBuilder.java | 2 +- .../xpack/sql/parser/SqlBaseBaseListener.java | 40 +- .../xpack/sql/parser/SqlBaseBaseVisitor.java | 20 +- .../xpack/sql/parser/SqlBaseListener.java | 80 +- .../xpack/sql/parser/SqlBaseParser.java | 1010 +++++++++-------- .../xpack/sql/parser/SqlBaseVisitor.java | 42 +- .../xpack/sql/parser/ExpressionTests.java | 13 + 8 files changed, 633 insertions(+), 576 deletions(-) diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index eca6f27087189..d1f1d15cab9f5 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -210,11 +210,11 @@ valueExpression | left=valueExpression operator=(ASTERISK | SLASH | PERCENT) right=valueExpression #arithmeticBinary | left=valueExpression operator=(PLUS | MINUS) right=valueExpression #arithmeticBinary | left=valueExpression comparisonOperator right=valueExpression #comparison - | valueExpression CAST_OP dataType #castOperatorExpression ; primaryExpression : castExpression #cast + | primaryExpression CAST_OP dataType #castOperatorExpression | extractExpression #extract | builtinDateTimeFunction #currentDateTimeFunction | constant #constantDefault diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 46d8148ed25b6..c3d5ba2228467 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -416,7 +416,7 @@ private static DataType dataType(Source ctx, String string) { @Override public Object visitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) { - return new Cast(source(ctx), expression(ctx.valueExpression()), typedParsing(ctx.dataType(), DataType.class)); + return new Cast(source(ctx), expression(ctx.primaryExpression()), typedParsing(ctx.dataType(), DataType.class)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index 07352baa93e82..bf5055ae21615 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -604,37 +604,37 @@ class SqlBaseBaseListener implements SqlBaseListener { * *

The default implementation does nothing.

*/ - @Override public void enterCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) { } + @Override public void enterArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) { } + @Override public void exitArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void enterArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) { } + @Override public void enterArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) { } + @Override public void exitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void enterArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) { } + @Override public void enterDereference(SqlBaseParser.DereferenceContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) { } + @Override public void exitDereference(SqlBaseParser.DereferenceContext ctx) { } /** * {@inheritDoc} * @@ -652,37 +652,37 @@ class SqlBaseBaseListener implements SqlBaseListener { * *

The default implementation does nothing.

*/ - @Override public void enterExtract(SqlBaseParser.ExtractContext ctx) { } + @Override public void enterConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitExtract(SqlBaseParser.ExtractContext ctx) { } + @Override public void exitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void enterCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { } + @Override public void enterExtract(SqlBaseParser.ExtractContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { } + @Override public void exitExtract(SqlBaseParser.ExtractContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void enterConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) { } + @Override public void enterParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) { } + @Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { } /** * {@inheritDoc} * @@ -700,49 +700,49 @@ class SqlBaseBaseListener implements SqlBaseListener { * *

The default implementation does nothing.

*/ - @Override public void enterFunction(SqlBaseParser.FunctionContext ctx) { } + @Override public void enterCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitFunction(SqlBaseParser.FunctionContext ctx) { } + @Override public void exitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void enterSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { } + @Override public void enterFunction(SqlBaseParser.FunctionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { } + @Override public void exitFunction(SqlBaseParser.FunctionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void enterDereference(SqlBaseParser.DereferenceContext ctx) { } + @Override public void enterCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitDereference(SqlBaseParser.DereferenceContext ctx) { } + @Override public void exitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void enterParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { } + @Override public void enterSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { } + @Override public void exitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index dbede024d7321..02d92832149af 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -360,21 +360,21 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) { return visitChildren(ctx); } + @Override public T visitArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitArithmeticBinary(SqlBaseParser.ArithmeticBinaryContext ctx) { return visitChildren(ctx); } + @Override public T visitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx) { return visitChildren(ctx); } + @Override public T visitDereference(SqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -388,21 +388,21 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitExtract(SqlBaseParser.ExtractContext ctx) { return visitChildren(ctx); } + @Override public T visitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { return visitChildren(ctx); } + @Override public T visitExtract(SqlBaseParser.ExtractContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx) { return visitChildren(ctx); } + @Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -416,28 +416,28 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitFunction(SqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); } + @Override public T visitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { return visitChildren(ctx); } + @Override public T visitFunction(SqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitDereference(SqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); } + @Override public T visitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); } + @Override public T visitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index d6ad9907b6fbf..9d9bc1a23c080 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -549,18 +549,6 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitComparison(SqlBaseParser.ComparisonContext ctx); - /** - * Enter a parse tree produced by the {@code castOperatorExpression} - * labeled alternative in {@link SqlBaseParser#valueExpression}. - * @param ctx the parse tree - */ - void enterCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx); - /** - * Exit a parse tree produced by the {@code castOperatorExpression} - * labeled alternative in {@link SqlBaseParser#valueExpression}. - * @param ctx the parse tree - */ - void exitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx); /** * Enter a parse tree produced by the {@code arithmeticBinary} * labeled alternative in {@link SqlBaseParser#valueExpression}. @@ -585,6 +573,18 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx); + /** + * Enter a parse tree produced by the {@code dereference} + * labeled alternative in {@link SqlBaseParser#primaryExpression}. + * @param ctx the parse tree + */ + void enterDereference(SqlBaseParser.DereferenceContext ctx); + /** + * Exit a parse tree produced by the {@code dereference} + * labeled alternative in {@link SqlBaseParser#primaryExpression}. + * @param ctx the parse tree + */ + void exitDereference(SqlBaseParser.DereferenceContext ctx); /** * Enter a parse tree produced by the {@code cast} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -598,41 +598,41 @@ interface SqlBaseListener extends ParseTreeListener { */ void exitCast(SqlBaseParser.CastContext ctx); /** - * Enter a parse tree produced by the {@code extract} + * Enter a parse tree produced by the {@code constantDefault} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterExtract(SqlBaseParser.ExtractContext ctx); + void enterConstantDefault(SqlBaseParser.ConstantDefaultContext ctx); /** - * Exit a parse tree produced by the {@code extract} + * Exit a parse tree produced by the {@code constantDefault} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitExtract(SqlBaseParser.ExtractContext ctx); + void exitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx); /** - * Enter a parse tree produced by the {@code currentDateTimeFunction} + * Enter a parse tree produced by the {@code extract} * labeled alternative in {@link SqlBaseParser#primaryExpression}. 
* @param ctx the parse tree */ - void enterCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); + void enterExtract(SqlBaseParser.ExtractContext ctx); /** - * Exit a parse tree produced by the {@code currentDateTimeFunction} + * Exit a parse tree produced by the {@code extract} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); + void exitExtract(SqlBaseParser.ExtractContext ctx); /** - * Enter a parse tree produced by the {@code constantDefault} + * Enter a parse tree produced by the {@code parenthesizedExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterConstantDefault(SqlBaseParser.ConstantDefaultContext ctx); + void enterParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); /** - * Exit a parse tree produced by the {@code constantDefault} + * Exit a parse tree produced by the {@code parenthesizedExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx); + void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); /** * Enter a parse tree produced by the {@code star} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -646,53 +646,53 @@ interface SqlBaseListener extends ParseTreeListener { */ void exitStar(SqlBaseParser.StarContext ctx); /** - * Enter a parse tree produced by the {@code function} + * Enter a parse tree produced by the {@code castOperatorExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterFunction(SqlBaseParser.FunctionContext ctx); + void enterCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx); /** - * Exit a parse tree produced by the {@code function} + * Exit a parse tree produced by the {@code castOperatorExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitFunction(SqlBaseParser.FunctionContext ctx); + void exitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx); /** - * Enter a parse tree produced by the {@code subqueryExpression} + * Enter a parse tree produced by the {@code function} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx); + void enterFunction(SqlBaseParser.FunctionContext ctx); /** - * Exit a parse tree produced by the {@code subqueryExpression} + * Exit a parse tree produced by the {@code function} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx); + void exitFunction(SqlBaseParser.FunctionContext ctx); /** - * Enter a parse tree produced by the {@code dereference} + * Enter a parse tree produced by the {@code currentDateTimeFunction} * labeled alternative in {@link SqlBaseParser#primaryExpression}. 
* @param ctx the parse tree */ - void enterDereference(SqlBaseParser.DereferenceContext ctx); + void enterCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); /** - * Exit a parse tree produced by the {@code dereference} + * Exit a parse tree produced by the {@code currentDateTimeFunction} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitDereference(SqlBaseParser.DereferenceContext ctx); + void exitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); /** - * Enter a parse tree produced by the {@code parenthesizedExpression} + * Enter a parse tree produced by the {@code subqueryExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); + void enterSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx); /** - * Exit a parse tree produced by the {@code parenthesizedExpression} + * Exit a parse tree produced by the {@code subqueryExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); + void exitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx); /** * Enter a parse tree produced by {@link SqlBaseParser#builtinDateTimeFunction}. * @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 34af98c1afcdf..4f94d8d191a98 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -3723,29 +3723,6 @@ public T accept(ParseTreeVisitor visitor) { else return visitor.visitChildren(this); } } - public static class CastOperatorExpressionContext extends ValueExpressionContext { - public ValueExpressionContext valueExpression() { - return getRuleContext(ValueExpressionContext.class,0); - } - public TerminalNode CAST_OP() { return getToken(SqlBaseParser.CAST_OP, 0); } - public DataTypeContext dataType() { - return getRuleContext(DataTypeContext.class,0); - } - public CastOperatorExpressionContext(ValueExpressionContext ctx) { copyFrom(ctx); } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterCastOperatorExpression(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitCastOperatorExpression(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitCastOperatorExpression(this); - else return visitor.visitChildren(this); - } - } public static class ArithmeticBinaryContext extends ValueExpressionContext { public ValueExpressionContext left; public Token operator; @@ -3884,7 +3861,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _prevctx = _localctx; setState(549); - primaryExpression(); + primaryExpression(0); } break; case PLUS: @@ -3902,14 +3879,14 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti consume(); } setState(551); - valueExpression(5); + valueExpression(4); } break; 
default: throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(569); + setState(566); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,77,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -3917,7 +3894,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(567); + setState(564); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { case 1: @@ -3926,7 +3903,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); setState(554); - if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); + if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); setState(555); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); @@ -3936,7 +3913,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti consume(); } setState(556); - ((ArithmeticBinaryContext)_localctx).right = valueExpression(5); + ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; case 2: @@ -3945,7 +3922,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); setState(557); - if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); setState(558); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); @@ -3955,7 +3932,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti consume(); } setState(559); - ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); + ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; case 3: @@ -3964,29 +3941,17 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); setState(560); - if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); setState(561); comparisonOperator(); setState(562); - ((ComparisonContext)_localctx).right = valueExpression(3); - } - break; - case 4: - { - _localctx = new CastOperatorExpressionContext(new ValueExpressionContext(_parentctx, _parentState)); - pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(564); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(565); - match(CAST_OP); - setState(566); - dataType(); + ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(571); + setState(568); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,77,_ctx); } @@ -4130,6 +4095,29 @@ public T accept(ParseTreeVisitor visitor) { else return visitor.visitChildren(this); } } + public static class CastOperatorExpressionContext extends PrimaryExpressionContext { + public PrimaryExpressionContext primaryExpression() { + return 
getRuleContext(PrimaryExpressionContext.class,0); + } + public TerminalNode CAST_OP() { return getToken(SqlBaseParser.CAST_OP, 0); } + public DataTypeContext dataType() { + return getRuleContext(DataTypeContext.class,0); + } + public CastOperatorExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterCastOperatorExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitCastOperatorExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitCastOperatorExpression(this); + else return visitor.visitChildren(this); + } + } public static class FunctionContext extends PrimaryExpressionContext { public FunctionExpressionContext functionExpression() { return getRuleContext(FunctionExpressionContext.class,0); @@ -4189,105 +4177,152 @@ public T accept(ParseTreeVisitor visitor) { } public final PrimaryExpressionContext primaryExpression() throws RecognitionException { - PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_primaryExpression); + return primaryExpression(0); + } + + private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, _parentState); + PrimaryExpressionContext _prevctx = _localctx; + int _startState = 60; + enterRecursionRule(_localctx, 60, RULE_primaryExpression, _p); int _la; try { - setState(592); + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(590); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,79,_ctx) ) { case 1: - _localctx = new CastContext(_localctx); - enterOuterAlt(_localctx, 1); { - setState(572); + _localctx = new CastContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + + setState(570); castExpression(); } break; case 2: - _localctx = new ExtractContext(_localctx); - enterOuterAlt(_localctx, 2); { - setState(573); + _localctx = new ExtractContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(571); extractExpression(); } break; case 3: - _localctx = new CurrentDateTimeFunctionContext(_localctx); - enterOuterAlt(_localctx, 3); { - setState(574); + _localctx = new CurrentDateTimeFunctionContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(572); builtinDateTimeFunction(); } break; case 4: - _localctx = new ConstantDefaultContext(_localctx); - enterOuterAlt(_localctx, 4); { - setState(575); + _localctx = new ConstantDefaultContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(573); constant(); } break; case 5: - _localctx = new StarContext(_localctx); - enterOuterAlt(_localctx, 5); { - setState(579); + _localctx = new StarContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(577); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | 
(1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { { - setState(576); + setState(574); qualifiedName(); - setState(577); + setState(575); match(DOT); } } - setState(581); + setState(579); match(ASTERISK); } break; case 6: - _localctx = new FunctionContext(_localctx); - enterOuterAlt(_localctx, 6); { - setState(582); + _localctx = new FunctionContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(580); functionExpression(); } break; case 7: - _localctx = new SubqueryExpressionContext(_localctx); - enterOuterAlt(_localctx, 7); { - setState(583); + _localctx = new SubqueryExpressionContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(581); match(T__0); - setState(584); + setState(582); query(); - setState(585); + setState(583); match(T__1); } break; case 8: - _localctx = new DereferenceContext(_localctx); - enterOuterAlt(_localctx, 8); { - setState(587); + _localctx = new DereferenceContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(585); qualifiedName(); } break; case 9: - _localctx = new ParenthesizedExpressionContext(_localctx); - enterOuterAlt(_localctx, 9); { - setState(588); + _localctx = new ParenthesizedExpressionContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(586); match(T__0); - setState(589); + setState(587); expression(); - setState(590); + setState(588); match(T__1); } break; } + _ctx.stop = _input.LT(-1); + setState(597); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,80,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + if ( _parseListeners!=null ) triggerExitRuleEvent(); + _prevctx = _localctx; + { + { + _localctx = new CastOperatorExpressionContext(new PrimaryExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); + setState(592); + if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); + setState(593); + match(CAST_OP); + setState(594); + dataType(); + } + } + } + setState(599); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,80,_ctx); + } + } } catch (RecognitionException re) { _localctx.exception = re; @@ -4295,7 +4330,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _errHandler.recover(this, re); } finally { - exitRule(); + unrollRecursionContexts(_parentctx); } return _localctx; } @@ -4330,21 +4365,21 @@ public final BuiltinDateTimeFunctionContext builtinDateTimeFunction() throws Rec enterRule(_localctx, 62, RULE_builtinDateTimeFunction); int _la; try { - setState(607); + setState(613); switch (_input.LA(1)) { case CURRENT_DATE: enterOuterAlt(_localctx, 1); { - setState(594); + setState(600); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_DATE); - setState(597); + setState(603); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,80,_ctx) ) { + 
switch ( getInterpreter().adaptivePredict(_input,81,_ctx) ) { case 1: { - setState(595); + setState(601); match(T__0); - setState(596); + setState(602); match(T__1); } break; @@ -4354,25 +4389,25 @@ public final BuiltinDateTimeFunctionContext builtinDateTimeFunction() throws Rec case CURRENT_TIMESTAMP: enterOuterAlt(_localctx, 2); { - setState(599); - ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIMESTAMP); setState(605); + ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIMESTAMP); + setState(611); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,82,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,83,_ctx) ) { case 1: { - setState(600); + setState(606); match(T__0); - setState(602); + setState(608); _la = _input.LA(1); if (_la==INTEGER_VALUE) { { - setState(601); + setState(607); ((BuiltinDateTimeFunctionContext)_localctx).precision = match(INTEGER_VALUE); } } - setState(604); + setState(610); match(T__1); } break; @@ -4426,42 +4461,42 @@ public final CastExpressionContext castExpression() throws RecognitionException CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); enterRule(_localctx, 64, RULE_castExpression); try { - setState(619); + setState(625); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,84,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,85,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(609); + setState(615); castTemplate(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(610); + setState(616); match(FUNCTION_ESC); - setState(611); + setState(617); castTemplate(); - setState(612); + setState(618); match(ESC_END); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(614); + setState(620); convertTemplate(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(615); + setState(621); match(FUNCTION_ESC); - setState(616); + setState(622); convertTemplate(); - setState(617); + setState(623); match(ESC_END); } break; @@ -4512,17 +4547,17 @@ public final CastTemplateContext castTemplate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(621); + setState(627); match(CAST); - setState(622); + setState(628); match(T__0); - setState(623); + setState(629); expression(); - setState(624); + setState(630); match(AS); - setState(625); + setState(631); dataType(); - setState(626); + setState(632); match(T__1); } } @@ -4570,17 +4605,17 @@ public final ConvertTemplateContext convertTemplate() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(628); + setState(634); match(CONVERT); - setState(629); + setState(635); match(T__0); - setState(630); + setState(636); expression(); - setState(631); + setState(637); match(T__2); - setState(632); + setState(638); dataType(); - setState(633); + setState(639); match(T__1); } } @@ -4624,23 +4659,23 @@ public final ExtractExpressionContext extractExpression() throws RecognitionExce ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); enterRule(_localctx, 70, RULE_extractExpression); try { - setState(640); + setState(646); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(635); + setState(641); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(636); + setState(642); match(FUNCTION_ESC); - setState(637); + setState(643); extractTemplate(); - setState(638); + setState(644); match(ESC_END); } break; @@ -4694,17 +4729,17 @@ public final 
ExtractTemplateContext extractTemplate() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(642); + setState(648); match(EXTRACT); - setState(643); + setState(649); match(T__0); - setState(644); + setState(650); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(645); + setState(651); match(FROM); - setState(646); + setState(652); valueExpression(0); - setState(647); + setState(653); match(T__1); } } @@ -4747,7 +4782,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); enterRule(_localctx, 74, RULE_functionExpression); try { - setState(654); + setState(660); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4793,18 +4828,18 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(649); + setState(655); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(650); + setState(656); match(FUNCTION_ESC); - setState(651); + setState(657); functionTemplate(); - setState(652); + setState(658); match(ESC_END); } break; @@ -4862,45 +4897,45 @@ public final FunctionTemplateContext functionTemplate() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(656); + setState(662); functionName(); - setState(657); + setState(663); match(T__0); - setState(669); + setState(675); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RIGHT - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TRUE - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (FUNCTION_ESC - 67)) | (1L << (DATE_ESC - 67)) | (1L << (TIME_ESC - 67)) | (1L << (TIMESTAMP_ESC - 67)) | (1L << (GUID_ESC - 67)) | (1L << (PLUS - 67)) | (1L << (MINUS - 67)) | (1L << (ASTERISK - 67)) | (1L << (PARAM - 67)) | (1L << (STRING - 67)) | (1L << (INTEGER_VALUE - 67)) | (1L << (DECIMAL_VALUE - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { { - setState(659); + setState(665); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(658); + setState(664); setQuantifier(); } } - setState(661); + setState(667); expression(); - setState(666); + setState(672); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(662); + setState(668); match(T__2); - setState(663); + setState(669); expression(); } } - setState(668); + setState(674); _errHandler.sync(this); _la = 
_input.LA(1); } } } - setState(671); + setState(677); match(T__1); } } @@ -4944,19 +4979,19 @@ public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); enterRule(_localctx, 78, RULE_functionName); try { - setState(676); + setState(682); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); { - setState(673); + setState(679); match(LEFT); } break; case RIGHT: enterOuterAlt(_localctx, 2); { - setState(674); + setState(680); match(RIGHT); } break; @@ -5002,7 +5037,7 @@ public final FunctionNameContext functionName() throws RecognitionException { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 3); { - setState(675); + setState(681); identifier(); } break; @@ -5233,13 +5268,13 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 80, RULE_constant); try { int _alt; - setState(704); + setState(710); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(678); + setState(684); match(NULL); } break; @@ -5247,7 +5282,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntervalLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(679); + setState(685); interval(); } break; @@ -5256,7 +5291,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(680); + setState(686); number(); } break; @@ -5265,7 +5300,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(681); + setState(687); booleanValue(); } break; @@ -5273,7 +5308,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(683); + setState(689); _errHandler.sync(this); _alt = 1; do { @@ -5281,7 +5316,7 @@ public final ConstantContext constant() throws RecognitionException { case 1: { { - setState(682); + setState(688); match(STRING); } } @@ -5289,9 +5324,9 @@ public final ConstantContext constant() throws RecognitionException { default: throw new NoViableAltException(this); } - setState(685); + setState(691); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,91,_ctx); + _alt = getInterpreter().adaptivePredict(_input,92,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -5299,7 +5334,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(687); + setState(693); match(PARAM); } break; @@ -5307,11 +5342,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(688); + setState(694); match(DATE_ESC); - setState(689); + setState(695); string(); - setState(690); + setState(696); match(ESC_END); } break; @@ -5319,11 +5354,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(692); + setState(698); match(TIME_ESC); - setState(693); + setState(699); string(); - setState(694); + setState(700); match(ESC_END); } break; @@ -5331,11 +5366,11 @@ public final 
ConstantContext constant() throws RecognitionException { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(696); + setState(702); match(TIMESTAMP_ESC); - setState(697); + setState(703); string(); - setState(698); + setState(704); match(ESC_END); } break; @@ -5343,11 +5378,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(700); + setState(706); match(GUID_ESC); - setState(701); + setState(707); string(); - setState(702); + setState(708); match(ESC_END); } break; @@ -5400,7 +5435,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(706); + setState(712); _la = _input.LA(1); if ( !(((((_la - 100)) & ~0x3f) == 0 && ((1L << (_la - 100)) & ((1L << (EQ - 100)) | (1L << (NULLEQ - 100)) | (1L << (NEQ - 100)) | (1L << (LT - 100)) | (1L << (LTE - 100)) | (1L << (GT - 100)) | (1L << (GTE - 100)))) != 0)) ) { _errHandler.recoverInline(this); @@ -5449,7 +5484,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(708); + setState(714); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -5517,13 +5552,13 @@ public final IntervalContext interval() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(710); + setState(716); match(INTERVAL); - setState(712); + setState(718); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(711); + setState(717); ((IntervalContext)_localctx).sign = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -5534,35 +5569,35 @@ public final IntervalContext interval() throws RecognitionException { } } - setState(716); + setState(722); switch (_input.LA(1)) { case INTEGER_VALUE: case DECIMAL_VALUE: { - setState(714); + setState(720); ((IntervalContext)_localctx).valueNumeric = number(); } break; case PARAM: case STRING: { - setState(715); + setState(721); ((IntervalContext)_localctx).valuePattern = string(); } break; default: throw new NoViableAltException(this); } - setState(718); + setState(724); ((IntervalContext)_localctx).leading = intervalField(); - setState(721); + setState(727); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,96,_ctx) ) { case 1: { - setState(719); + setState(725); match(TO); - setState(720); + setState(726); ((IntervalContext)_localctx).trailing = intervalField(); } break; @@ -5619,7 +5654,7 @@ public final IntervalFieldContext intervalField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(723); + setState(729); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH) | (1L << MONTHS))) != 0) || ((((_la - 74)) & ~0x3f) == 0 && ((1L << (_la - 74)) & ((1L << (SECOND - 74)) | (1L << (SECONDS - 74)) | (1L << (YEAR - 74)) | (1L << (YEARS - 74)))) != 0)) ) { _errHandler.recoverInline(this); @@ -5677,7 +5712,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(725); + setState(731); identifier(); } } @@ -5729,25 +5764,25 @@ public final QualifiedNameContext qualifiedName() throws 
RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(732); + setState(738); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,96,_ctx); + _alt = getInterpreter().adaptivePredict(_input,97,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(727); + setState(733); identifier(); - setState(728); + setState(734); match(DOT); } } } - setState(734); + setState(740); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,96,_ctx); + _alt = getInterpreter().adaptivePredict(_input,97,_ctx); } - setState(735); + setState(741); identifier(); } } @@ -5792,13 +5827,13 @@ public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); enterRule(_localctx, 94, RULE_identifier); try { - setState(739); + setState(745); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(737); + setState(743); quoteIdentifier(); } break; @@ -5842,7 +5877,7 @@ public final IdentifierContext identifier() throws RecognitionException { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(738); + setState(744); unquoteIdentifier(); } break; @@ -5895,43 +5930,43 @@ public final TableIdentifierContext tableIdentifier() throws RecognitionExceptio enterRule(_localctx, 96, RULE_tableIdentifier); int _la; try { - setState(753); + setState(759); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,100,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,101,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(744); + setState(750); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { { - setState(741); + setState(747); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(742); + setState(748); match(T__3); } } - setState(746); + setState(752); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(750); + setState(756); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,99,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,100,_ctx) ) { case 1: { - setState(747); + setState(753); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(748); + setState(754); match(T__3); } break; } - setState(752); + setState(758); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -5998,13 +6033,13 @@ public final QuoteIdentifierContext quoteIdentifier() throws RecognitionExceptio QuoteIdentifierContext 
_localctx = new QuoteIdentifierContext(_ctx, getState()); enterRule(_localctx, 98, RULE_quoteIdentifier); try { - setState(757); + setState(763); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(755); + setState(761); match(QUOTED_IDENTIFIER); } break; @@ -6012,7 +6047,7 @@ public final QuoteIdentifierContext quoteIdentifier() throws RecognitionExceptio _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(756); + setState(762); match(BACKQUOTED_IDENTIFIER); } break; @@ -6084,13 +6119,13 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); enterRule(_localctx, 100, RULE_unquoteIdentifier); try { - setState(762); + setState(768); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(759); + setState(765); match(IDENTIFIER); } break; @@ -6133,7 +6168,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(760); + setState(766); nonReserved(); } break; @@ -6141,7 +6176,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(761); + setState(767); match(DIGIT_IDENTIFIER); } break; @@ -6210,13 +6245,13 @@ public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); enterRule(_localctx, 102, RULE_number); try { - setState(766); + setState(772); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(764); + setState(770); match(DECIMAL_VALUE); } break; @@ -6224,7 +6259,7 @@ public final NumberContext number() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(765); + setState(771); match(INTEGER_VALUE); } break; @@ -6272,7 +6307,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(768); + setState(774); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -6355,7 +6390,7 @@ public final NonReservedContext nonReserved() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(770); + setState(776); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH) | (1L << OPTIMIZED))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)))) != 0)) ) { _errHandler.recoverInline(this); @@ -6381,6 
+6416,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex); case 29: return valueExpression_sempred((ValueExpressionContext)_localctx, predIndex); + case 30: + return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); } return true; } @@ -6396,19 +6433,24 @@ private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, in private boolean valueExpression_sempred(ValueExpressionContext _localctx, int predIndex) { switch (predIndex) { case 2: - return precpred(_ctx, 4); - case 3: return precpred(_ctx, 3); - case 4: + case 3: return precpred(_ctx, 2); - case 5: + case 4: return precpred(_ctx, 1); } return true; } + private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, int predIndex) { + switch (predIndex) { + case 5: + return precpred(_ctx, 9); + } + return true; + } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u0082\u0307\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u0082\u030d\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -6452,270 +6494,272 @@ private boolean valueExpression_sempred(ValueExpressionContext _localctx, int pr "\n\33\3\33\3\33\3\33\5\33\u020d\n\33\3\33\3\33\3\33\3\33\5\33\u0213\n"+ "\33\3\33\5\33\u0216\n\33\3\34\3\34\3\34\3\35\3\35\5\35\u021d\n\35\3\36"+ "\3\36\3\36\3\36\3\36\3\36\5\36\u0225\n\36\3\37\3\37\3\37\3\37\5\37\u022b"+ - "\n\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37"+ - "\7\37\u023a\n\37\f\37\16\37\u023d\13\37\3 \3 \3 \3 \3 \3 \3 \5 \u0246"+ - "\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0253\n \3!\3!\3!\5!\u0258\n!"+ - "\3!\3!\3!\5!\u025d\n!\3!\5!\u0260\n!\5!\u0262\n!\3\"\3\"\3\"\3\"\3\"\3"+ - "\"\3\"\3\"\3\"\3\"\5\"\u026e\n\"\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3"+ - "$\3$\3%\3%\3%\3%\3%\5%\u0283\n%\3&\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3"+ - "\'\5\'\u0291\n\'\3(\3(\3(\5(\u0296\n(\3(\3(\3(\7(\u029b\n(\f(\16(\u029e"+ - "\13(\5(\u02a0\n(\3(\3(\3)\3)\3)\5)\u02a7\n)\3*\3*\3*\3*\3*\6*\u02ae\n"+ - "*\r*\16*\u02af\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\5*\u02c3"+ - "\n*\3+\3+\3,\3,\3-\3-\5-\u02cb\n-\3-\3-\5-\u02cf\n-\3-\3-\3-\5-\u02d4"+ - "\n-\3.\3.\3/\3/\3\60\3\60\3\60\7\60\u02dd\n\60\f\60\16\60\u02e0\13\60"+ - "\3\60\3\60\3\61\3\61\5\61\u02e6\n\61\3\62\3\62\3\62\5\62\u02eb\n\62\3"+ - "\62\3\62\3\62\3\62\5\62\u02f1\n\62\3\62\5\62\u02f4\n\62\3\63\3\63\5\63"+ - "\u02f8\n\63\3\64\3\64\3\64\5\64\u02fd\n\64\3\65\3\65\5\65\u0301\n\65\3"+ - "\66\3\66\3\67\3\67\3\67\2\4.<8\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36"+ - " \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bdfhjl\2\22\b\2\7\7\t\t\36"+ - "\36\66\66AAEE\4\2((SS\4\2\t\tAA\4\2%%--\3\2\32\33\3\2mn\4\2\7\7ww\4\2"+ - "\r\r\32\32\4\2##\62\62\4\2\7\7\34\34\3\2oq\3\2fl\4\2\"\"TT\7\2\27\30+"+ - ",8;LM\\]\3\2uv\30\2\b\t\22\23\27\27\31\31\36\36 #$&(++//\62\62\65\66"+ - "88::AAEGILOPRSVWYY\\\\\u0365\2n\3\2\2\2\4q\3\2\2\2\6\u00d9\3\2\2\2\b\u00e4"+ - "\3\2\2\2\n\u00e8\3\2\2\2\f\u00fd\3\2\2\2\16\u0104\3\2\2\2\20\u0106\3\2"+ - "\2\2\22\u010e\3\2\2\2\24\u012a\3\2\2\2\26\u0134\3\2\2\2\30\u013e\3\2\2"+ - "\2\32\u014d\3\2\2\2\34\u014f\3\2\2\2\36\u0155\3\2\2\2 \u0157\3\2\2\2\""+ - "\u015e\3\2\2\2$\u0170\3\2\2\2&\u0181\3\2\2\2(\u0191\3\2\2\2*\u01ac\3\2"+ - 
"\2\2,\u01ae\3\2\2\2.\u01cf\3\2\2\2\60\u01e0\3\2\2\2\62\u01e3\3\2\2\2\64"+ - "\u0215\3\2\2\2\66\u0217\3\2\2\28\u021a\3\2\2\2:\u0224\3\2\2\2<\u022a\3"+ - "\2\2\2>\u0252\3\2\2\2@\u0261\3\2\2\2B\u026d\3\2\2\2D\u026f\3\2\2\2F\u0276"+ - "\3\2\2\2H\u0282\3\2\2\2J\u0284\3\2\2\2L\u0290\3\2\2\2N\u0292\3\2\2\2P"+ - "\u02a6\3\2\2\2R\u02c2\3\2\2\2T\u02c4\3\2\2\2V\u02c6\3\2\2\2X\u02c8\3\2"+ - "\2\2Z\u02d5\3\2\2\2\\\u02d7\3\2\2\2^\u02de\3\2\2\2`\u02e5\3\2\2\2b\u02f3"+ - "\3\2\2\2d\u02f7\3\2\2\2f\u02fc\3\2\2\2h\u0300\3\2\2\2j\u0302\3\2\2\2l"+ - "\u0304\3\2\2\2no\5\6\4\2op\7\2\2\3p\3\3\2\2\2qr\5,\27\2rs\7\2\2\3s\5\3"+ - "\2\2\2t\u00da\5\b\5\2u\u0083\7 \2\2v\177\7\3\2\2wx\7G\2\2x~\t\2\2\2yz"+ - "\7$\2\2z~\t\3\2\2{|\7Y\2\2|~\5V,\2}w\3\2\2\2}y\3\2\2\2}{\3\2\2\2~\u0081"+ - "\3\2\2\2\177}\3\2\2\2\177\u0080\3\2\2\2\u0080\u0082\3\2\2\2\u0081\177"+ - "\3\2\2\2\u0082\u0084\7\4\2\2\u0083v\3\2\2\2\u0083\u0084\3\2\2\2\u0084"+ - "\u0085\3\2\2\2\u0085\u00da\5\6\4\2\u0086\u0092\7\31\2\2\u0087\u008e\7"+ - "\3\2\2\u0088\u0089\7G\2\2\u0089\u008d\t\4\2\2\u008a\u008b\7$\2\2\u008b"+ - "\u008d\t\3\2\2\u008c\u0088\3\2\2\2\u008c\u008a\3\2\2\2\u008d\u0090\3\2"+ - "\2\2\u008e\u008c\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0091\3\2\2\2\u0090"+ - "\u008e\3\2\2\2\u0091\u0093\7\4\2\2\u0092\u0087\3\2\2\2\u0092\u0093\3\2"+ - "\2\2\u0093\u0094\3\2\2\2\u0094\u00da\5\6\4\2\u0095\u0096\7O\2\2\u0096"+ - "\u0099\7R\2\2\u0097\u009a\5\66\34\2\u0098\u009a\5b\62\2\u0099\u0097\3"+ - "\2\2\2\u0099\u0098\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u00da\3\2\2\2\u009b"+ - "\u009c\7O\2\2\u009c\u009d\7\23\2\2\u009d\u00a0\t\5\2\2\u009e\u00a1\5\66"+ - "\34\2\u009f\u00a1\5b\62\2\u00a0\u009e\3\2\2\2\u00a0\u009f\3\2\2\2\u00a1"+ - "\u00da\3\2\2\2\u00a2\u00a5\t\6\2\2\u00a3\u00a6\5\66\34\2\u00a4\u00a6\5"+ - "b\62\2\u00a5\u00a3\3\2\2\2\u00a5\u00a4\3\2\2\2\u00a6\u00da\3\2\2\2\u00a7"+ - "\u00a8\7O\2\2\u00a8\u00aa\7\'\2\2\u00a9\u00ab\5\66\34\2\u00aa\u00a9\3"+ - "\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00da\3\2\2\2\u00ac\u00ad\7O\2\2\u00ad"+ - "\u00da\7K\2\2\u00ae\u00af\7P\2\2\u00af\u00b2\7R\2\2\u00b0\u00b1\7\21\2"+ - "\2\u00b1\u00b3\5\66\34\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3"+ - "\u00b6\3\2\2\2\u00b4\u00b7\5\66\34\2\u00b5\u00b7\5b\62\2\u00b6\u00b4\3"+ - "\2\2\2\u00b6\u00b5\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00c1\3\2\2\2\u00b8"+ - "\u00b9\7V\2\2\u00b9\u00be\5j\66\2\u00ba\u00bb\7\5\2\2\u00bb\u00bd\5j\66"+ - "\2\u00bc\u00ba\3\2\2\2\u00bd\u00c0\3\2\2\2\u00be\u00bc\3\2\2\2\u00be\u00bf"+ - "\3\2\2\2\u00bf\u00c2\3\2\2\2\u00c0\u00be\3\2\2\2\u00c1\u00b8\3\2\2\2\u00c1"+ - "\u00c2\3\2\2\2\u00c2\u00da\3\2\2\2\u00c3\u00c4\7P\2\2\u00c4\u00c7\7\23"+ - "\2\2\u00c5\u00c6\7\21\2\2\u00c6\u00c8\5j\66\2\u00c7\u00c5\3\2\2\2\u00c7"+ - "\u00c8\3\2\2\2\u00c8\u00cc\3\2\2\2\u00c9\u00ca\7Q\2\2\u00ca\u00cd\5\66"+ - "\34\2\u00cb\u00cd\5b\62\2\u00cc\u00c9\3\2\2\2\u00cc\u00cb\3\2\2\2\u00cc"+ - "\u00cd\3\2\2\2\u00cd\u00cf\3\2\2\2\u00ce\u00d0\5\66\34\2\u00cf\u00ce\3"+ - "\2\2\2\u00cf\u00d0\3\2\2\2\u00d0\u00da\3\2\2\2\u00d1\u00d2\7P\2\2\u00d2"+ - "\u00d7\7W\2\2\u00d3\u00d5\t\7\2\2\u00d4\u00d3\3\2\2\2\u00d4\u00d5\3\2"+ - "\2\2\u00d5\u00d6\3\2\2\2\u00d6\u00d8\5h\65\2\u00d7\u00d4\3\2\2\2\u00d7"+ - "\u00d8\3\2\2\2\u00d8\u00da\3\2\2\2\u00d9t\3\2\2\2\u00d9u\3\2\2\2\u00d9"+ - "\u0086\3\2\2\2\u00d9\u0095\3\2\2\2\u00d9\u009b\3\2\2\2\u00d9\u00a2\3\2"+ - "\2\2\u00d9\u00a7\3\2\2\2\u00d9\u00ac\3\2\2\2\u00d9\u00ae\3\2\2\2\u00d9"+ - "\u00c3\3\2\2\2\u00d9\u00d1\3\2\2\2\u00da\7\3\2\2\2\u00db\u00dc\7[\2\2"+ - "\u00dc\u00e1\5\34\17\2\u00dd\u00de\7\5\2\2\u00de\u00e0\5\34\17\2\u00df"+ - 
"\u00dd\3\2\2\2\u00e0\u00e3\3\2\2\2\u00e1\u00df\3\2\2\2\u00e1\u00e2\3\2"+ - "\2\2\u00e2\u00e5\3\2\2\2\u00e3\u00e1\3\2\2\2\u00e4\u00db\3\2\2\2\u00e4"+ - "\u00e5\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e7\5\n\6\2\u00e7\t\3\2\2\2"+ - "\u00e8\u00f3\5\16\b\2\u00e9\u00ea\7C\2\2\u00ea\u00eb\7\17\2\2\u00eb\u00f0"+ - "\5\20\t\2\u00ec\u00ed\7\5\2\2\u00ed\u00ef\5\20\t\2\u00ee\u00ec\3\2\2\2"+ - "\u00ef\u00f2\3\2\2\2\u00f0\u00ee\3\2\2\2\u00f0\u00f1\3\2\2\2\u00f1\u00f4"+ - "\3\2\2\2\u00f2\u00f0\3\2\2\2\u00f3\u00e9\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4"+ - "\u00f6\3\2\2\2\u00f5\u00f7\5\f\7\2\u00f6\u00f5\3\2\2\2\u00f6\u00f7\3\2"+ - "\2\2\u00f7\13\3\2\2\2\u00f8\u00f9\7\65\2\2\u00f9\u00fe\t\b\2\2\u00fa\u00fb"+ - "\7`\2\2\u00fb\u00fc\t\b\2\2\u00fc\u00fe\7e\2\2\u00fd\u00f8\3\2\2\2\u00fd"+ - "\u00fa\3\2\2\2\u00fe\r\3\2\2\2\u00ff\u0105\5\22\n\2\u0100\u0101\7\3\2"+ - "\2\u0101\u0102\5\n\6\2\u0102\u0103\7\4\2\2\u0103\u0105\3\2\2\2\u0104\u00ff"+ - "\3\2\2\2\u0104\u0100\3\2\2\2\u0105\17\3\2\2\2\u0106\u0108\5,\27\2\u0107"+ - "\u0109\t\t\2\2\u0108\u0107\3\2\2\2\u0108\u0109\3\2\2\2\u0109\u010c\3\2"+ - "\2\2\u010a\u010b\7?\2\2\u010b\u010d\t\n\2\2\u010c\u010a\3\2\2\2\u010c"+ - "\u010d\3\2\2\2\u010d\21\3\2\2\2\u010e\u0110\7N\2\2\u010f\u0111\5\36\20"+ - "\2\u0110\u010f\3\2\2\2\u0110\u0111\3\2\2\2\u0111\u0112\3\2\2\2\u0112\u0117"+ - "\5 \21\2\u0113\u0114\7\5\2\2\u0114\u0116\5 \21\2\u0115\u0113\3\2\2\2\u0116"+ - "\u0119\3\2\2\2\u0117\u0115\3\2\2\2\u0117\u0118\3\2\2\2\u0118\u011b\3\2"+ - "\2\2\u0119\u0117\3\2\2\2\u011a\u011c\5\24\13\2\u011b\u011a\3\2\2\2\u011b"+ - "\u011c\3\2\2\2\u011c\u011f\3\2\2\2\u011d\u011e\7Z\2\2\u011e\u0120\5.\30"+ - "\2\u011f\u011d\3\2\2\2\u011f\u0120\3\2\2\2\u0120\u0124\3\2\2\2\u0121\u0122"+ - "\7)\2\2\u0122\u0123\7\17\2\2\u0123\u0125\5\26\f\2\u0124\u0121\3\2\2\2"+ - "\u0124\u0125\3\2\2\2\u0125\u0128\3\2\2\2\u0126\u0127\7*\2\2\u0127\u0129"+ - "\5.\30\2\u0128\u0126\3\2\2\2\u0128\u0129\3\2\2\2\u0129\23\3\2\2\2\u012a"+ - "\u012b\7%\2\2\u012b\u0130\5\"\22\2\u012c\u012d\7\5\2\2\u012d\u012f\5\""+ - "\22\2\u012e\u012c\3\2\2\2\u012f\u0132\3\2\2\2\u0130\u012e\3\2\2\2\u0130"+ - "\u0131\3\2\2\2\u0131\25\3\2\2\2\u0132\u0130\3\2\2\2\u0133\u0135\5\36\20"+ - "\2\u0134\u0133\3\2\2\2\u0134\u0135\3\2\2\2\u0135\u0136\3\2\2\2\u0136\u013b"+ - "\5\30\r\2\u0137\u0138\7\5\2\2\u0138\u013a\5\30\r\2\u0139\u0137\3\2\2\2"+ - "\u013a\u013d\3\2\2\2\u013b\u0139\3\2\2\2\u013b\u013c\3\2\2\2\u013c\27"+ - "\3\2\2\2\u013d\u013b\3\2\2\2\u013e\u013f\5\32\16\2\u013f\31\3\2\2\2\u0140"+ - "\u0149\7\3\2\2\u0141\u0146\5,\27\2\u0142\u0143\7\5\2\2\u0143\u0145\5,"+ - "\27\2\u0144\u0142\3\2\2\2\u0145\u0148\3\2\2\2\u0146\u0144\3\2\2\2\u0146"+ - "\u0147\3\2\2\2\u0147\u014a\3\2\2\2\u0148\u0146\3\2\2\2\u0149\u0141\3\2"+ - "\2\2\u0149\u014a\3\2\2\2\u014a\u014b\3\2\2\2\u014b\u014e\7\4\2\2\u014c"+ - "\u014e\5,\27\2\u014d\u0140\3\2\2\2\u014d\u014c\3\2\2\2\u014e\33\3\2\2"+ - "\2\u014f\u0150\5`\61\2\u0150\u0151\7\f\2\2\u0151\u0152\7\3\2\2\u0152\u0153"+ - "\5\n\6\2\u0153\u0154\7\4\2\2\u0154\35\3\2\2\2\u0155\u0156\t\13\2\2\u0156"+ - "\37\3\2\2\2\u0157\u015c\5,\27\2\u0158\u015a\7\f\2\2\u0159\u0158\3\2\2"+ - "\2\u0159\u015a\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u015d\5`\61\2\u015c\u0159"+ - "\3\2\2\2\u015c\u015d\3\2\2\2\u015d!\3\2\2\2\u015e\u0162\5*\26\2\u015f"+ - "\u0161\5$\23\2\u0160\u015f\3\2\2\2\u0161\u0164\3\2\2\2\u0162\u0160\3\2"+ - "\2\2\u0162\u0163\3\2\2\2\u0163#\3\2\2\2\u0164\u0162\3\2\2\2\u0165\u0166"+ - "\5&\24\2\u0166\u0167\7\61\2\2\u0167\u0169\5*\26\2\u0168\u016a\5(\25\2"+ - "\u0169\u0168\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u0171\3\2\2\2\u016b\u016c"+ - 
"\7<\2\2\u016c\u016d\5&\24\2\u016d\u016e\7\61\2\2\u016e\u016f\5*\26\2\u016f"+ - "\u0171\3\2\2\2\u0170\u0165\3\2\2\2\u0170\u016b\3\2\2\2\u0171%\3\2\2\2"+ - "\u0172\u0174\7.\2\2\u0173\u0172\3\2\2\2\u0173\u0174\3\2\2\2\u0174\u0182"+ - "\3\2\2\2\u0175\u0177\7\63\2\2\u0176\u0178\7D\2\2\u0177\u0176\3\2\2\2\u0177"+ - "\u0178\3\2\2\2\u0178\u0182\3\2\2\2\u0179\u017b\7H\2\2\u017a\u017c\7D\2"+ - "\2\u017b\u017a\3\2\2\2\u017b\u017c\3\2\2\2\u017c\u0182\3\2\2\2\u017d\u017f"+ - "\7&\2\2\u017e\u0180\7D\2\2\u017f\u017e\3\2\2\2\u017f\u0180\3\2\2\2\u0180"+ - "\u0182\3\2\2\2\u0181\u0173\3\2\2\2\u0181\u0175\3\2\2\2\u0181\u0179\3\2"+ - "\2\2\u0181\u017d\3\2\2\2\u0182\'\3\2\2\2\u0183\u0184\7@\2\2\u0184\u0192"+ - "\5.\30\2\u0185\u0186\7X\2\2\u0186\u0187\7\3\2\2\u0187\u018c\5`\61\2\u0188"+ - "\u0189\7\5\2\2\u0189\u018b\5`\61\2\u018a\u0188\3\2\2\2\u018b\u018e\3\2"+ - "\2\2\u018c\u018a\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018f\3\2\2\2\u018e"+ - "\u018c\3\2\2\2\u018f\u0190\7\4\2\2\u0190\u0192\3\2\2\2\u0191\u0183\3\2"+ - "\2\2\u0191\u0185\3\2\2\2\u0192)\3\2\2\2\u0193\u0198\5b\62\2\u0194\u0196"+ - "\7\f\2\2\u0195\u0194\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0197\3\2\2\2\u0197"+ - "\u0199\5^\60\2\u0198\u0195\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u01ad\3\2"+ - "\2\2\u019a\u019b\7\3\2\2\u019b\u019c\5\n\6\2\u019c\u01a1\7\4\2\2\u019d"+ - "\u019f\7\f\2\2\u019e\u019d\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a0\3\2"+ - "\2\2\u01a0\u01a2\5^\60\2\u01a1\u019e\3\2\2\2\u01a1\u01a2\3\2\2\2\u01a2"+ - "\u01ad\3\2\2\2\u01a3\u01a4\7\3\2\2\u01a4\u01a5\5\"\22\2\u01a5\u01aa\7"+ - "\4\2\2\u01a6\u01a8\7\f\2\2\u01a7\u01a6\3\2\2\2\u01a7\u01a8\3\2\2\2\u01a8"+ - "\u01a9\3\2\2\2\u01a9\u01ab\5^\60\2\u01aa\u01a7\3\2\2\2\u01aa\u01ab\3\2"+ - "\2\2\u01ab\u01ad\3\2\2\2\u01ac\u0193\3\2\2\2\u01ac\u019a\3\2\2\2\u01ac"+ - "\u01a3\3\2\2\2\u01ad+\3\2\2\2\u01ae\u01af\5.\30\2\u01af-\3\2\2\2\u01b0"+ - "\u01b1\b\30\1\2\u01b1\u01b2\7=\2\2\u01b2\u01d0\5.\30\n\u01b3\u01b4\7\37"+ - "\2\2\u01b4\u01b5\7\3\2\2\u01b5\u01b6\5\b\5\2\u01b6\u01b7\7\4\2\2\u01b7"+ - "\u01d0\3\2\2\2\u01b8\u01b9\7J\2\2\u01b9\u01ba\7\3\2\2\u01ba\u01bb\5j\66"+ - "\2\u01bb\u01bc\5\60\31\2\u01bc\u01bd\7\4\2\2\u01bd\u01d0\3\2\2\2\u01be"+ - "\u01bf\7\67\2\2\u01bf\u01c0\7\3\2\2\u01c0\u01c1\5^\60\2\u01c1\u01c2\7"+ - "\5\2\2\u01c2\u01c3\5j\66\2\u01c3\u01c4\5\60\31\2\u01c4\u01c5\7\4\2\2\u01c5"+ - "\u01d0\3\2\2\2\u01c6\u01c7\7\67\2\2\u01c7\u01c8\7\3\2\2\u01c8\u01c9\5"+ - "j\66\2\u01c9\u01ca\7\5\2\2\u01ca\u01cb\5j\66\2\u01cb\u01cc\5\60\31\2\u01cc"+ - "\u01cd\7\4\2\2\u01cd\u01d0\3\2\2\2\u01ce\u01d0\5\62\32\2\u01cf\u01b0\3"+ - "\2\2\2\u01cf\u01b3\3\2\2\2\u01cf\u01b8\3\2\2\2\u01cf\u01be\3\2\2\2\u01cf"+ - "\u01c6\3\2\2\2\u01cf\u01ce\3\2\2\2\u01d0\u01d9\3\2\2\2\u01d1\u01d2\f\4"+ - "\2\2\u01d2\u01d3\7\n\2\2\u01d3\u01d8\5.\30\5\u01d4\u01d5\f\3\2\2\u01d5"+ - "\u01d6\7B\2\2\u01d6\u01d8\5.\30\4\u01d7\u01d1\3\2\2\2\u01d7\u01d4\3\2"+ - "\2\2\u01d8\u01db\3\2\2\2\u01d9\u01d7\3\2\2\2\u01d9\u01da\3\2\2\2\u01da"+ - "/\3\2\2\2\u01db\u01d9\3\2\2\2\u01dc\u01dd\7\5\2\2\u01dd\u01df\5j\66\2"+ - "\u01de\u01dc\3\2\2\2\u01df\u01e2\3\2\2\2\u01e0\u01de\3\2\2\2\u01e0\u01e1"+ - "\3\2\2\2\u01e1\61\3\2\2\2\u01e2\u01e0\3\2\2\2\u01e3\u01e5\5<\37\2\u01e4"+ - "\u01e6\5\64\33\2\u01e5\u01e4\3\2\2\2\u01e5\u01e6\3\2\2\2\u01e6\63\3\2"+ - "\2\2\u01e7\u01e9\7=\2\2\u01e8\u01e7\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9"+ - "\u01ea\3\2\2\2\u01ea\u01eb\7\16\2\2\u01eb\u01ec\5<\37\2\u01ec\u01ed\7"+ - "\n\2\2\u01ed\u01ee\5<\37\2\u01ee\u0216\3\2\2\2\u01ef\u01f1\7=\2\2\u01f0"+ - "\u01ef\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f2\3\2\2\2\u01f2\u01f3\7-"+ - 
"\2\2\u01f3\u01f4\7\3\2\2\u01f4\u01f9\5<\37\2\u01f5\u01f6\7\5\2\2\u01f6"+ - "\u01f8\5<\37\2\u01f7\u01f5\3\2\2\2\u01f8\u01fb\3\2\2\2\u01f9\u01f7\3\2"+ - "\2\2\u01f9\u01fa\3\2\2\2\u01fa\u01fc\3\2\2\2\u01fb\u01f9\3\2\2\2\u01fc"+ - "\u01fd\7\4\2\2\u01fd\u0216\3\2\2\2\u01fe\u0200\7=\2\2\u01ff\u01fe\3\2"+ - "\2\2\u01ff\u0200\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0202\7-\2\2\u0202"+ - "\u0203\7\3\2\2\u0203\u0204\5\b\5\2\u0204\u0205\7\4\2\2\u0205\u0216\3\2"+ - "\2\2\u0206\u0208\7=\2\2\u0207\u0206\3\2\2\2\u0207\u0208\3\2\2\2\u0208"+ - "\u0209\3\2\2\2\u0209\u020a\7\64\2\2\u020a\u0216\58\35\2\u020b\u020d\7"+ - "=\2\2\u020c\u020b\3\2\2\2\u020c\u020d\3\2\2\2\u020d\u020e\3\2\2\2\u020e"+ - "\u020f\7I\2\2\u020f\u0216\5j\66\2\u0210\u0212\7\60\2\2\u0211\u0213\7="+ - "\2\2\u0212\u0211\3\2\2\2\u0212\u0213\3\2\2\2\u0213\u0214\3\2\2\2\u0214"+ - "\u0216\7>\2\2\u0215\u01e8\3\2\2\2\u0215\u01f0\3\2\2\2\u0215\u01ff\3\2"+ - "\2\2\u0215\u0207\3\2\2\2\u0215\u020c\3\2\2\2\u0215\u0210\3\2\2\2\u0216"+ - "\65\3\2\2\2\u0217\u0218\7\64\2\2\u0218\u0219\58\35\2\u0219\67\3\2\2\2"+ - "\u021a\u021c\5j\66\2\u021b\u021d\5:\36\2\u021c\u021b\3\2\2\2\u021c\u021d"+ - "\3\2\2\2\u021d9\3\2\2\2\u021e\u021f\7\35\2\2\u021f\u0225\5j\66\2\u0220"+ - "\u0221\7^\2\2\u0221\u0222\5j\66\2\u0222\u0223\7e\2\2\u0223\u0225\3\2\2"+ - "\2\u0224\u021e\3\2\2\2\u0224\u0220\3\2\2\2\u0225;\3\2\2\2\u0226\u0227"+ - "\b\37\1\2\u0227\u022b\5> \2\u0228\u0229\t\7\2\2\u0229\u022b\5<\37\7\u022a"+ - "\u0226\3\2\2\2\u022a\u0228\3\2\2\2\u022b\u023b\3\2\2\2\u022c\u022d\f\6"+ - "\2\2\u022d\u022e\t\f\2\2\u022e\u023a\5<\37\7\u022f\u0230\f\5\2\2\u0230"+ - "\u0231\t\7\2\2\u0231\u023a\5<\37\6\u0232\u0233\f\4\2\2\u0233\u0234\5T"+ - "+\2\u0234\u0235\5<\37\5\u0235\u023a\3\2\2\2\u0236\u0237\f\3\2\2\u0237"+ - "\u0238\7r\2\2\u0238\u023a\5\\/\2\u0239\u022c\3\2\2\2\u0239\u022f\3\2\2"+ - "\2\u0239\u0232\3\2\2\2\u0239\u0236\3\2\2\2\u023a\u023d\3\2\2\2\u023b\u0239"+ - "\3\2\2\2\u023b\u023c\3\2\2\2\u023c=\3\2\2\2\u023d\u023b\3\2\2\2\u023e"+ - "\u0253\5B\"\2\u023f\u0253\5H%\2\u0240\u0253\5@!\2\u0241\u0253\5R*\2\u0242"+ - "\u0243\5^\60\2\u0243\u0244\7t\2\2\u0244\u0246\3\2\2\2\u0245\u0242\3\2"+ - "\2\2\u0245\u0246\3\2\2\2\u0246\u0247\3\2\2\2\u0247\u0253\7o\2\2\u0248"+ - "\u0253\5L\'\2\u0249\u024a\7\3\2\2\u024a\u024b\5\b\5\2\u024b\u024c\7\4"+ - "\2\2\u024c\u0253\3\2\2\2\u024d\u0253\5^\60\2\u024e\u024f\7\3\2\2\u024f"+ - "\u0250\5,\27\2\u0250\u0251\7\4\2\2\u0251\u0253\3\2\2\2\u0252\u023e\3\2"+ - "\2\2\u0252\u023f\3\2\2\2\u0252\u0240\3\2\2\2\u0252\u0241\3\2\2\2\u0252"+ - "\u0245\3\2\2\2\u0252\u0248\3\2\2\2\u0252\u0249\3\2\2\2\u0252\u024d\3\2"+ - "\2\2\u0252\u024e\3\2\2\2\u0253?\3\2\2\2\u0254\u0257\7\25\2\2\u0255\u0256"+ - "\7\3\2\2\u0256\u0258\7\4\2\2\u0257\u0255\3\2\2\2\u0257\u0258\3\2\2\2\u0258"+ - "\u0262\3\2\2\2\u0259\u025f\7\26\2\2\u025a\u025c\7\3\2\2\u025b\u025d\7"+ - "w\2\2\u025c\u025b\3\2\2\2\u025c\u025d\3\2\2\2\u025d\u025e\3\2\2\2\u025e"+ - "\u0260\7\4\2\2\u025f\u025a\3\2\2\2\u025f\u0260\3\2\2\2\u0260\u0262\3\2"+ - "\2\2\u0261\u0254\3\2\2\2\u0261\u0259\3\2\2\2\u0262A\3\2\2\2\u0263\u026e"+ - "\5D#\2\u0264\u0265\7_\2\2\u0265\u0266\5D#\2\u0266\u0267\7e\2\2\u0267\u026e"+ - "\3\2\2\2\u0268\u026e\5F$\2\u0269\u026a\7_\2\2\u026a\u026b\5F$\2\u026b"+ - "\u026c\7e\2\2\u026c\u026e\3\2\2\2\u026d\u0263\3\2\2\2\u026d\u0264\3\2"+ - "\2\2\u026d\u0268\3\2\2\2\u026d\u0269\3\2\2\2\u026eC\3\2\2\2\u026f\u0270"+ - "\7\20\2\2\u0270\u0271\7\3\2\2\u0271\u0272\5,\27\2\u0272\u0273\7\f\2\2"+ - "\u0273\u0274\5\\/\2\u0274\u0275\7\4\2\2\u0275E\3\2\2\2\u0276\u0277\7\24"+ - 
"\2\2\u0277\u0278\7\3\2\2\u0278\u0279\5,\27\2\u0279\u027a\7\5\2\2\u027a"+ - "\u027b\5\\/\2\u027b\u027c\7\4\2\2\u027cG\3\2\2\2\u027d\u0283\5J&\2\u027e"+ - "\u027f\7_\2\2\u027f\u0280\5J&\2\u0280\u0281\7e\2\2\u0281\u0283\3\2\2\2"+ - "\u0282\u027d\3\2\2\2\u0282\u027e\3\2\2\2\u0283I\3\2\2\2\u0284\u0285\7"+ - "!\2\2\u0285\u0286\7\3\2\2\u0286\u0287\5`\61\2\u0287\u0288\7%\2\2\u0288"+ - "\u0289\5<\37\2\u0289\u028a\7\4\2\2\u028aK\3\2\2\2\u028b\u0291\5N(\2\u028c"+ - "\u028d\7_\2\2\u028d\u028e\5N(\2\u028e\u028f\7e\2\2\u028f\u0291\3\2\2\2"+ - "\u0290\u028b\3\2\2\2\u0290\u028c\3\2\2\2\u0291M\3\2\2\2\u0292\u0293\5"+ - "P)\2\u0293\u029f\7\3\2\2\u0294\u0296\5\36\20\2\u0295\u0294\3\2\2\2\u0295"+ - "\u0296\3\2\2\2\u0296\u0297\3\2\2\2\u0297\u029c\5,\27\2\u0298\u0299\7\5"+ - "\2\2\u0299\u029b\5,\27\2\u029a\u0298\3\2\2\2\u029b\u029e\3\2\2\2\u029c"+ - "\u029a\3\2\2\2\u029c\u029d\3\2\2\2\u029d\u02a0\3\2\2\2\u029e\u029c\3\2"+ - "\2\2\u029f\u0295\3\2\2\2\u029f\u02a0\3\2\2\2\u02a0\u02a1\3\2\2\2\u02a1"+ - "\u02a2\7\4\2\2\u02a2O\3\2\2\2\u02a3\u02a7\7\63\2\2\u02a4\u02a7\7H\2\2"+ - "\u02a5\u02a7\5`\61\2\u02a6\u02a3\3\2\2\2\u02a6\u02a4\3\2\2\2\u02a6\u02a5"+ - "\3\2\2\2\u02a7Q\3\2\2\2\u02a8\u02c3\7>\2\2\u02a9\u02c3\5X-\2\u02aa\u02c3"+ - "\5h\65\2\u02ab\u02c3\5V,\2\u02ac\u02ae\7v\2\2\u02ad\u02ac\3\2\2\2\u02ae"+ - "\u02af\3\2\2\2\u02af\u02ad\3\2\2\2\u02af\u02b0\3\2\2\2\u02b0\u02c3\3\2"+ - "\2\2\u02b1\u02c3\7u\2\2\u02b2\u02b3\7a\2\2\u02b3\u02b4\5j\66\2\u02b4\u02b5"+ - "\7e\2\2\u02b5\u02c3\3\2\2\2\u02b6\u02b7\7b\2\2\u02b7\u02b8\5j\66\2\u02b8"+ - "\u02b9\7e\2\2\u02b9\u02c3\3\2\2\2\u02ba\u02bb\7c\2\2\u02bb\u02bc\5j\66"+ - "\2\u02bc\u02bd\7e\2\2\u02bd\u02c3\3\2\2\2\u02be\u02bf\7d\2\2\u02bf\u02c0"+ - "\5j\66\2\u02c0\u02c1\7e\2\2\u02c1\u02c3\3\2\2\2\u02c2\u02a8\3\2\2\2\u02c2"+ - "\u02a9\3\2\2\2\u02c2\u02aa\3\2\2\2\u02c2\u02ab\3\2\2\2\u02c2\u02ad\3\2"+ - "\2\2\u02c2\u02b1\3\2\2\2\u02c2\u02b2\3\2\2\2\u02c2\u02b6\3\2\2\2\u02c2"+ - "\u02ba\3\2\2\2\u02c2\u02be\3\2\2\2\u02c3S\3\2\2\2\u02c4\u02c5\t\r\2\2"+ - "\u02c5U\3\2\2\2\u02c6\u02c7\t\16\2\2\u02c7W\3\2\2\2\u02c8\u02ca\7/\2\2"+ - "\u02c9\u02cb\t\7\2\2\u02ca\u02c9\3\2\2\2\u02ca\u02cb\3\2\2\2\u02cb\u02ce"+ - "\3\2\2\2\u02cc\u02cf\5h\65\2\u02cd\u02cf\5j\66\2\u02ce\u02cc\3\2\2\2\u02ce"+ - "\u02cd\3\2\2\2\u02cf\u02d0\3\2\2\2\u02d0\u02d3\5Z.\2\u02d1\u02d2\7U\2"+ - "\2\u02d2\u02d4\5Z.\2\u02d3\u02d1\3\2\2\2\u02d3\u02d4\3\2\2\2\u02d4Y\3"+ - "\2\2\2\u02d5\u02d6\t\17\2\2\u02d6[\3\2\2\2\u02d7\u02d8\5`\61\2\u02d8]"+ - "\3\2\2\2\u02d9\u02da\5`\61\2\u02da\u02db\7t\2\2\u02db\u02dd\3\2\2\2\u02dc"+ - "\u02d9\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2\u02de\u02df\3\2"+ - "\2\2\u02df\u02e1\3\2\2\2\u02e0\u02de\3\2\2\2\u02e1\u02e2\5`\61\2\u02e2"+ - "_\3\2\2\2\u02e3\u02e6\5d\63\2\u02e4\u02e6\5f\64\2\u02e5\u02e3\3\2\2\2"+ - "\u02e5\u02e4\3\2\2\2\u02e6a\3\2\2\2\u02e7\u02e8\5`\61\2\u02e8\u02e9\7"+ - "\6\2\2\u02e9\u02eb\3\2\2\2\u02ea\u02e7\3\2\2\2\u02ea\u02eb\3\2\2\2\u02eb"+ - "\u02ec\3\2\2\2\u02ec\u02f4\7{\2\2\u02ed\u02ee\5`\61\2\u02ee\u02ef\7\6"+ - "\2\2\u02ef\u02f1\3\2\2\2\u02f0\u02ed\3\2\2\2\u02f0\u02f1\3\2\2\2\u02f1"+ - "\u02f2\3\2\2\2\u02f2\u02f4\5`\61\2\u02f3\u02ea\3\2\2\2\u02f3\u02f0\3\2"+ - "\2\2\u02f4c\3\2\2\2\u02f5\u02f8\7|\2\2\u02f6\u02f8\7}\2\2\u02f7\u02f5"+ - "\3\2\2\2\u02f7\u02f6\3\2\2\2\u02f8e\3\2\2\2\u02f9\u02fd\7y\2\2\u02fa\u02fd"+ - "\5l\67\2\u02fb\u02fd\7z\2\2\u02fc\u02f9\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fc"+ - "\u02fb\3\2\2\2\u02fdg\3\2\2\2\u02fe\u0301\7x\2\2\u02ff\u0301\7w\2\2\u0300"+ - "\u02fe\3\2\2\2\u0300\u02ff\3\2\2\2\u0301i\3\2\2\2\u0302\u0303\t\20\2\2"+ - 
"\u0303k\3\2\2\2\u0304\u0305\t\21\2\2\u0305m\3\2\2\2j}\177\u0083\u008c"+ + "\n\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\7\37\u0237\n\37"+ + "\f\37\16\37\u023a\13\37\3 \3 \3 \3 \3 \3 \3 \3 \5 \u0244\n \3 \3 \3 \3"+ + " \3 \3 \3 \3 \3 \3 \3 \5 \u0251\n \3 \3 \3 \7 \u0256\n \f \16 \u0259\13"+ + " \3!\3!\3!\5!\u025e\n!\3!\3!\3!\5!\u0263\n!\3!\5!\u0266\n!\5!\u0268\n"+ + "!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u0274\n\"\3#\3#\3#\3#\3"+ + "#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5%\u0289\n%\3&\3&\3&\3&\3"+ + "&\3&\3&\3\'\3\'\3\'\3\'\3\'\5\'\u0297\n\'\3(\3(\3(\5(\u029c\n(\3(\3(\3"+ + "(\7(\u02a1\n(\f(\16(\u02a4\13(\5(\u02a6\n(\3(\3(\3)\3)\3)\5)\u02ad\n)"+ + "\3*\3*\3*\3*\3*\6*\u02b4\n*\r*\16*\u02b5\3*\3*\3*\3*\3*\3*\3*\3*\3*\3"+ + "*\3*\3*\3*\3*\3*\3*\3*\5*\u02c9\n*\3+\3+\3,\3,\3-\3-\5-\u02d1\n-\3-\3"+ + "-\5-\u02d5\n-\3-\3-\3-\5-\u02da\n-\3.\3.\3/\3/\3\60\3\60\3\60\7\60\u02e3"+ + "\n\60\f\60\16\60\u02e6\13\60\3\60\3\60\3\61\3\61\5\61\u02ec\n\61\3\62"+ + "\3\62\3\62\5\62\u02f1\n\62\3\62\3\62\3\62\3\62\5\62\u02f7\n\62\3\62\5"+ + "\62\u02fa\n\62\3\63\3\63\5\63\u02fe\n\63\3\64\3\64\3\64\5\64\u0303\n\64"+ + "\3\65\3\65\5\65\u0307\n\65\3\66\3\66\3\67\3\67\3\67\2\5.<>8\2\4\6\b\n"+ + "\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\"+ + "^`bdfhjl\2\22\b\2\7\7\t\t\36\36\66\66AAEE\4\2((SS\4\2\t\tAA\4\2%%--\3"+ + "\2\32\33\3\2mn\4\2\7\7ww\4\2\r\r\32\32\4\2##\62\62\4\2\7\7\34\34\3\2o"+ + "q\3\2fl\4\2\"\"TT\7\2\27\30+,8;LM\\]\3\2uv\30\2\b\t\22\23\27\27\31\31"+ + "\36\36 #$&(++//\62\62\65\6688::AAEGILOPRSVWYY\\\\\u036b\2n\3\2\2\2\4"+ + "q\3\2\2\2\6\u00d9\3\2\2\2\b\u00e4\3\2\2\2\n\u00e8\3\2\2\2\f\u00fd\3\2"+ + "\2\2\16\u0104\3\2\2\2\20\u0106\3\2\2\2\22\u010e\3\2\2\2\24\u012a\3\2\2"+ + "\2\26\u0134\3\2\2\2\30\u013e\3\2\2\2\32\u014d\3\2\2\2\34\u014f\3\2\2\2"+ + "\36\u0155\3\2\2\2 \u0157\3\2\2\2\"\u015e\3\2\2\2$\u0170\3\2\2\2&\u0181"+ + "\3\2\2\2(\u0191\3\2\2\2*\u01ac\3\2\2\2,\u01ae\3\2\2\2.\u01cf\3\2\2\2\60"+ + "\u01e0\3\2\2\2\62\u01e3\3\2\2\2\64\u0215\3\2\2\2\66\u0217\3\2\2\28\u021a"+ + "\3\2\2\2:\u0224\3\2\2\2<\u022a\3\2\2\2>\u0250\3\2\2\2@\u0267\3\2\2\2B"+ + "\u0273\3\2\2\2D\u0275\3\2\2\2F\u027c\3\2\2\2H\u0288\3\2\2\2J\u028a\3\2"+ + "\2\2L\u0296\3\2\2\2N\u0298\3\2\2\2P\u02ac\3\2\2\2R\u02c8\3\2\2\2T\u02ca"+ + "\3\2\2\2V\u02cc\3\2\2\2X\u02ce\3\2\2\2Z\u02db\3\2\2\2\\\u02dd\3\2\2\2"+ + "^\u02e4\3\2\2\2`\u02eb\3\2\2\2b\u02f9\3\2\2\2d\u02fd\3\2\2\2f\u0302\3"+ + "\2\2\2h\u0306\3\2\2\2j\u0308\3\2\2\2l\u030a\3\2\2\2no\5\6\4\2op\7\2\2"+ + "\3p\3\3\2\2\2qr\5,\27\2rs\7\2\2\3s\5\3\2\2\2t\u00da\5\b\5\2u\u0083\7 "+ + "\2\2v\177\7\3\2\2wx\7G\2\2x~\t\2\2\2yz\7$\2\2z~\t\3\2\2{|\7Y\2\2|~\5V"+ + ",\2}w\3\2\2\2}y\3\2\2\2}{\3\2\2\2~\u0081\3\2\2\2\177}\3\2\2\2\177\u0080"+ + "\3\2\2\2\u0080\u0082\3\2\2\2\u0081\177\3\2\2\2\u0082\u0084\7\4\2\2\u0083"+ + "v\3\2\2\2\u0083\u0084\3\2\2\2\u0084\u0085\3\2\2\2\u0085\u00da\5\6\4\2"+ + "\u0086\u0092\7\31\2\2\u0087\u008e\7\3\2\2\u0088\u0089\7G\2\2\u0089\u008d"+ + "\t\4\2\2\u008a\u008b\7$\2\2\u008b\u008d\t\3\2\2\u008c\u0088\3\2\2\2\u008c"+ + "\u008a\3\2\2\2\u008d\u0090\3\2\2\2\u008e\u008c\3\2\2\2\u008e\u008f\3\2"+ + "\2\2\u008f\u0091\3\2\2\2\u0090\u008e\3\2\2\2\u0091\u0093\7\4\2\2\u0092"+ + "\u0087\3\2\2\2\u0092\u0093\3\2\2\2\u0093\u0094\3\2\2\2\u0094\u00da\5\6"+ + "\4\2\u0095\u0096\7O\2\2\u0096\u0099\7R\2\2\u0097\u009a\5\66\34\2\u0098"+ + "\u009a\5b\62\2\u0099\u0097\3\2\2\2\u0099\u0098\3\2\2\2\u0099\u009a\3\2"+ + "\2\2\u009a\u00da\3\2\2\2\u009b\u009c\7O\2\2\u009c\u009d\7\23\2\2\u009d"+ + 
"\u00a0\t\5\2\2\u009e\u00a1\5\66\34\2\u009f\u00a1\5b\62\2\u00a0\u009e\3"+ + "\2\2\2\u00a0\u009f\3\2\2\2\u00a1\u00da\3\2\2\2\u00a2\u00a5\t\6\2\2\u00a3"+ + "\u00a6\5\66\34\2\u00a4\u00a6\5b\62\2\u00a5\u00a3\3\2\2\2\u00a5\u00a4\3"+ + "\2\2\2\u00a6\u00da\3\2\2\2\u00a7\u00a8\7O\2\2\u00a8\u00aa\7\'\2\2\u00a9"+ + "\u00ab\5\66\34\2\u00aa\u00a9\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00da\3"+ + "\2\2\2\u00ac\u00ad\7O\2\2\u00ad\u00da\7K\2\2\u00ae\u00af\7P\2\2\u00af"+ + "\u00b2\7R\2\2\u00b0\u00b1\7\21\2\2\u00b1\u00b3\5\66\34\2\u00b2\u00b0\3"+ + "\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00b6\3\2\2\2\u00b4\u00b7\5\66\34\2\u00b5"+ + "\u00b7\5b\62\2\u00b6\u00b4\3\2\2\2\u00b6\u00b5\3\2\2\2\u00b6\u00b7\3\2"+ + "\2\2\u00b7\u00c1\3\2\2\2\u00b8\u00b9\7V\2\2\u00b9\u00be\5j\66\2\u00ba"+ + "\u00bb\7\5\2\2\u00bb\u00bd\5j\66\2\u00bc\u00ba\3\2\2\2\u00bd\u00c0\3\2"+ + "\2\2\u00be\u00bc\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf\u00c2\3\2\2\2\u00c0"+ + "\u00be\3\2\2\2\u00c1\u00b8\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00da\3\2"+ + "\2\2\u00c3\u00c4\7P\2\2\u00c4\u00c7\7\23\2\2\u00c5\u00c6\7\21\2\2\u00c6"+ + "\u00c8\5j\66\2\u00c7\u00c5\3\2\2\2\u00c7\u00c8\3\2\2\2\u00c8\u00cc\3\2"+ + "\2\2\u00c9\u00ca\7Q\2\2\u00ca\u00cd\5\66\34\2\u00cb\u00cd\5b\62\2\u00cc"+ + "\u00c9\3\2\2\2\u00cc\u00cb\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00cf\3\2"+ + "\2\2\u00ce\u00d0\5\66\34\2\u00cf\u00ce\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0"+ + "\u00da\3\2\2\2\u00d1\u00d2\7P\2\2\u00d2\u00d7\7W\2\2\u00d3\u00d5\t\7\2"+ + "\2\u00d4\u00d3\3\2\2\2\u00d4\u00d5\3\2\2\2\u00d5\u00d6\3\2\2\2\u00d6\u00d8"+ + "\5h\65\2\u00d7\u00d4\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\u00da\3\2\2\2\u00d9"+ + "t\3\2\2\2\u00d9u\3\2\2\2\u00d9\u0086\3\2\2\2\u00d9\u0095\3\2\2\2\u00d9"+ + "\u009b\3\2\2\2\u00d9\u00a2\3\2\2\2\u00d9\u00a7\3\2\2\2\u00d9\u00ac\3\2"+ + "\2\2\u00d9\u00ae\3\2\2\2\u00d9\u00c3\3\2\2\2\u00d9\u00d1\3\2\2\2\u00da"+ + "\7\3\2\2\2\u00db\u00dc\7[\2\2\u00dc\u00e1\5\34\17\2\u00dd\u00de\7\5\2"+ + "\2\u00de\u00e0\5\34\17\2\u00df\u00dd\3\2\2\2\u00e0\u00e3\3\2\2\2\u00e1"+ + "\u00df\3\2\2\2\u00e1\u00e2\3\2\2\2\u00e2\u00e5\3\2\2\2\u00e3\u00e1\3\2"+ + "\2\2\u00e4\u00db\3\2\2\2\u00e4\u00e5\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6"+ + "\u00e7\5\n\6\2\u00e7\t\3\2\2\2\u00e8\u00f3\5\16\b\2\u00e9\u00ea\7C\2\2"+ + "\u00ea\u00eb\7\17\2\2\u00eb\u00f0\5\20\t\2\u00ec\u00ed\7\5\2\2\u00ed\u00ef"+ + "\5\20\t\2\u00ee\u00ec\3\2\2\2\u00ef\u00f2\3\2\2\2\u00f0\u00ee\3\2\2\2"+ + "\u00f0\u00f1\3\2\2\2\u00f1\u00f4\3\2\2\2\u00f2\u00f0\3\2\2\2\u00f3\u00e9"+ + "\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4\u00f6\3\2\2\2\u00f5\u00f7\5\f\7\2\u00f6"+ + "\u00f5\3\2\2\2\u00f6\u00f7\3\2\2\2\u00f7\13\3\2\2\2\u00f8\u00f9\7\65\2"+ + "\2\u00f9\u00fe\t\b\2\2\u00fa\u00fb\7`\2\2\u00fb\u00fc\t\b\2\2\u00fc\u00fe"+ + "\7e\2\2\u00fd\u00f8\3\2\2\2\u00fd\u00fa\3\2\2\2\u00fe\r\3\2\2\2\u00ff"+ + "\u0105\5\22\n\2\u0100\u0101\7\3\2\2\u0101\u0102\5\n\6\2\u0102\u0103\7"+ + "\4\2\2\u0103\u0105\3\2\2\2\u0104\u00ff\3\2\2\2\u0104\u0100\3\2\2\2\u0105"+ + "\17\3\2\2\2\u0106\u0108\5,\27\2\u0107\u0109\t\t\2\2\u0108\u0107\3\2\2"+ + "\2\u0108\u0109\3\2\2\2\u0109\u010c\3\2\2\2\u010a\u010b\7?\2\2\u010b\u010d"+ + "\t\n\2\2\u010c\u010a\3\2\2\2\u010c\u010d\3\2\2\2\u010d\21\3\2\2\2\u010e"+ + "\u0110\7N\2\2\u010f\u0111\5\36\20\2\u0110\u010f\3\2\2\2\u0110\u0111\3"+ + "\2\2\2\u0111\u0112\3\2\2\2\u0112\u0117\5 \21\2\u0113\u0114\7\5\2\2\u0114"+ + "\u0116\5 \21\2\u0115\u0113\3\2\2\2\u0116\u0119\3\2\2\2\u0117\u0115\3\2"+ + "\2\2\u0117\u0118\3\2\2\2\u0118\u011b\3\2\2\2\u0119\u0117\3\2\2\2\u011a"+ + "\u011c\5\24\13\2\u011b\u011a\3\2\2\2\u011b\u011c\3\2\2\2\u011c\u011f\3"+ + 
"\2\2\2\u011d\u011e\7Z\2\2\u011e\u0120\5.\30\2\u011f\u011d\3\2\2\2\u011f"+ + "\u0120\3\2\2\2\u0120\u0124\3\2\2\2\u0121\u0122\7)\2\2\u0122\u0123\7\17"+ + "\2\2\u0123\u0125\5\26\f\2\u0124\u0121\3\2\2\2\u0124\u0125\3\2\2\2\u0125"+ + "\u0128\3\2\2\2\u0126\u0127\7*\2\2\u0127\u0129\5.\30\2\u0128\u0126\3\2"+ + "\2\2\u0128\u0129\3\2\2\2\u0129\23\3\2\2\2\u012a\u012b\7%\2\2\u012b\u0130"+ + "\5\"\22\2\u012c\u012d\7\5\2\2\u012d\u012f\5\"\22\2\u012e\u012c\3\2\2\2"+ + "\u012f\u0132\3\2\2\2\u0130\u012e\3\2\2\2\u0130\u0131\3\2\2\2\u0131\25"+ + "\3\2\2\2\u0132\u0130\3\2\2\2\u0133\u0135\5\36\20\2\u0134\u0133\3\2\2\2"+ + "\u0134\u0135\3\2\2\2\u0135\u0136\3\2\2\2\u0136\u013b\5\30\r\2\u0137\u0138"+ + "\7\5\2\2\u0138\u013a\5\30\r\2\u0139\u0137\3\2\2\2\u013a\u013d\3\2\2\2"+ + "\u013b\u0139\3\2\2\2\u013b\u013c\3\2\2\2\u013c\27\3\2\2\2\u013d\u013b"+ + "\3\2\2\2\u013e\u013f\5\32\16\2\u013f\31\3\2\2\2\u0140\u0149\7\3\2\2\u0141"+ + "\u0146\5,\27\2\u0142\u0143\7\5\2\2\u0143\u0145\5,\27\2\u0144\u0142\3\2"+ + "\2\2\u0145\u0148\3\2\2\2\u0146\u0144\3\2\2\2\u0146\u0147\3\2\2\2\u0147"+ + "\u014a\3\2\2\2\u0148\u0146\3\2\2\2\u0149\u0141\3\2\2\2\u0149\u014a\3\2"+ + "\2\2\u014a\u014b\3\2\2\2\u014b\u014e\7\4\2\2\u014c\u014e\5,\27\2\u014d"+ + "\u0140\3\2\2\2\u014d\u014c\3\2\2\2\u014e\33\3\2\2\2\u014f\u0150\5`\61"+ + "\2\u0150\u0151\7\f\2\2\u0151\u0152\7\3\2\2\u0152\u0153\5\n\6\2\u0153\u0154"+ + "\7\4\2\2\u0154\35\3\2\2\2\u0155\u0156\t\13\2\2\u0156\37\3\2\2\2\u0157"+ + "\u015c\5,\27\2\u0158\u015a\7\f\2\2\u0159\u0158\3\2\2\2\u0159\u015a\3\2"+ + "\2\2\u015a\u015b\3\2\2\2\u015b\u015d\5`\61\2\u015c\u0159\3\2\2\2\u015c"+ + "\u015d\3\2\2\2\u015d!\3\2\2\2\u015e\u0162\5*\26\2\u015f\u0161\5$\23\2"+ + "\u0160\u015f\3\2\2\2\u0161\u0164\3\2\2\2\u0162\u0160\3\2\2\2\u0162\u0163"+ + "\3\2\2\2\u0163#\3\2\2\2\u0164\u0162\3\2\2\2\u0165\u0166\5&\24\2\u0166"+ + "\u0167\7\61\2\2\u0167\u0169\5*\26\2\u0168\u016a\5(\25\2\u0169\u0168\3"+ + "\2\2\2\u0169\u016a\3\2\2\2\u016a\u0171\3\2\2\2\u016b\u016c\7<\2\2\u016c"+ + "\u016d\5&\24\2\u016d\u016e\7\61\2\2\u016e\u016f\5*\26\2\u016f\u0171\3"+ + "\2\2\2\u0170\u0165\3\2\2\2\u0170\u016b\3\2\2\2\u0171%\3\2\2\2\u0172\u0174"+ + "\7.\2\2\u0173\u0172\3\2\2\2\u0173\u0174\3\2\2\2\u0174\u0182\3\2\2\2\u0175"+ + "\u0177\7\63\2\2\u0176\u0178\7D\2\2\u0177\u0176\3\2\2\2\u0177\u0178\3\2"+ + "\2\2\u0178\u0182\3\2\2\2\u0179\u017b\7H\2\2\u017a\u017c\7D\2\2\u017b\u017a"+ + "\3\2\2\2\u017b\u017c\3\2\2\2\u017c\u0182\3\2\2\2\u017d\u017f\7&\2\2\u017e"+ + "\u0180\7D\2\2\u017f\u017e\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u0182\3\2"+ + "\2\2\u0181\u0173\3\2\2\2\u0181\u0175\3\2\2\2\u0181\u0179\3\2\2\2\u0181"+ + "\u017d\3\2\2\2\u0182\'\3\2\2\2\u0183\u0184\7@\2\2\u0184\u0192\5.\30\2"+ + "\u0185\u0186\7X\2\2\u0186\u0187\7\3\2\2\u0187\u018c\5`\61\2\u0188\u0189"+ + "\7\5\2\2\u0189\u018b\5`\61\2\u018a\u0188\3\2\2\2\u018b\u018e\3\2\2\2\u018c"+ + "\u018a\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018f\3\2\2\2\u018e\u018c\3\2"+ + "\2\2\u018f\u0190\7\4\2\2\u0190\u0192\3\2\2\2\u0191\u0183\3\2\2\2\u0191"+ + "\u0185\3\2\2\2\u0192)\3\2\2\2\u0193\u0198\5b\62\2\u0194\u0196\7\f\2\2"+ + "\u0195\u0194\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0197\3\2\2\2\u0197\u0199"+ + "\5^\60\2\u0198\u0195\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u01ad\3\2\2\2\u019a"+ + "\u019b\7\3\2\2\u019b\u019c\5\n\6\2\u019c\u01a1\7\4\2\2\u019d\u019f\7\f"+ + "\2\2\u019e\u019d\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0"+ + "\u01a2\5^\60\2\u01a1\u019e\3\2\2\2\u01a1\u01a2\3\2\2\2\u01a2\u01ad\3\2"+ + "\2\2\u01a3\u01a4\7\3\2\2\u01a4\u01a5\5\"\22\2\u01a5\u01aa\7\4\2\2\u01a6"+ + 
"\u01a8\7\f\2\2\u01a7\u01a6\3\2\2\2\u01a7\u01a8\3\2\2\2\u01a8\u01a9\3\2"+ + "\2\2\u01a9\u01ab\5^\60\2\u01aa\u01a7\3\2\2\2\u01aa\u01ab\3\2\2\2\u01ab"+ + "\u01ad\3\2\2\2\u01ac\u0193\3\2\2\2\u01ac\u019a\3\2\2\2\u01ac\u01a3\3\2"+ + "\2\2\u01ad+\3\2\2\2\u01ae\u01af\5.\30\2\u01af-\3\2\2\2\u01b0\u01b1\b\30"+ + "\1\2\u01b1\u01b2\7=\2\2\u01b2\u01d0\5.\30\n\u01b3\u01b4\7\37\2\2\u01b4"+ + "\u01b5\7\3\2\2\u01b5\u01b6\5\b\5\2\u01b6\u01b7\7\4\2\2\u01b7\u01d0\3\2"+ + "\2\2\u01b8\u01b9\7J\2\2\u01b9\u01ba\7\3\2\2\u01ba\u01bb\5j\66\2\u01bb"+ + "\u01bc\5\60\31\2\u01bc\u01bd\7\4\2\2\u01bd\u01d0\3\2\2\2\u01be\u01bf\7"+ + "\67\2\2\u01bf\u01c0\7\3\2\2\u01c0\u01c1\5^\60\2\u01c1\u01c2\7\5\2\2\u01c2"+ + "\u01c3\5j\66\2\u01c3\u01c4\5\60\31\2\u01c4\u01c5\7\4\2\2\u01c5\u01d0\3"+ + "\2\2\2\u01c6\u01c7\7\67\2\2\u01c7\u01c8\7\3\2\2\u01c8\u01c9\5j\66\2\u01c9"+ + "\u01ca\7\5\2\2\u01ca\u01cb\5j\66\2\u01cb\u01cc\5\60\31\2\u01cc\u01cd\7"+ + "\4\2\2\u01cd\u01d0\3\2\2\2\u01ce\u01d0\5\62\32\2\u01cf\u01b0\3\2\2\2\u01cf"+ + "\u01b3\3\2\2\2\u01cf\u01b8\3\2\2\2\u01cf\u01be\3\2\2\2\u01cf\u01c6\3\2"+ + "\2\2\u01cf\u01ce\3\2\2\2\u01d0\u01d9\3\2\2\2\u01d1\u01d2\f\4\2\2\u01d2"+ + "\u01d3\7\n\2\2\u01d3\u01d8\5.\30\5\u01d4\u01d5\f\3\2\2\u01d5\u01d6\7B"+ + "\2\2\u01d6\u01d8\5.\30\4\u01d7\u01d1\3\2\2\2\u01d7\u01d4\3\2\2\2\u01d8"+ + "\u01db\3\2\2\2\u01d9\u01d7\3\2\2\2\u01d9\u01da\3\2\2\2\u01da/\3\2\2\2"+ + "\u01db\u01d9\3\2\2\2\u01dc\u01dd\7\5\2\2\u01dd\u01df\5j\66\2\u01de\u01dc"+ + "\3\2\2\2\u01df\u01e2\3\2\2\2\u01e0\u01de\3\2\2\2\u01e0\u01e1\3\2\2\2\u01e1"+ + "\61\3\2\2\2\u01e2\u01e0\3\2\2\2\u01e3\u01e5\5<\37\2\u01e4\u01e6\5\64\33"+ + "\2\u01e5\u01e4\3\2\2\2\u01e5\u01e6\3\2\2\2\u01e6\63\3\2\2\2\u01e7\u01e9"+ + "\7=\2\2\u01e8\u01e7\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01ea\3\2\2\2\u01ea"+ + "\u01eb\7\16\2\2\u01eb\u01ec\5<\37\2\u01ec\u01ed\7\n\2\2\u01ed\u01ee\5"+ + "<\37\2\u01ee\u0216\3\2\2\2\u01ef\u01f1\7=\2\2\u01f0\u01ef\3\2\2\2\u01f0"+ + "\u01f1\3\2\2\2\u01f1\u01f2\3\2\2\2\u01f2\u01f3\7-\2\2\u01f3\u01f4\7\3"+ + "\2\2\u01f4\u01f9\5<\37\2\u01f5\u01f6\7\5\2\2\u01f6\u01f8\5<\37\2\u01f7"+ + "\u01f5\3\2\2\2\u01f8\u01fb\3\2\2\2\u01f9\u01f7\3\2\2\2\u01f9\u01fa\3\2"+ + "\2\2\u01fa\u01fc\3\2\2\2\u01fb\u01f9\3\2\2\2\u01fc\u01fd\7\4\2\2\u01fd"+ + "\u0216\3\2\2\2\u01fe\u0200\7=\2\2\u01ff\u01fe\3\2\2\2\u01ff\u0200\3\2"+ + "\2\2\u0200\u0201\3\2\2\2\u0201\u0202\7-\2\2\u0202\u0203\7\3\2\2\u0203"+ + "\u0204\5\b\5\2\u0204\u0205\7\4\2\2\u0205\u0216\3\2\2\2\u0206\u0208\7="+ + "\2\2\u0207\u0206\3\2\2\2\u0207\u0208\3\2\2\2\u0208\u0209\3\2\2\2\u0209"+ + "\u020a\7\64\2\2\u020a\u0216\58\35\2\u020b\u020d\7=\2\2\u020c\u020b\3\2"+ + "\2\2\u020c\u020d\3\2\2\2\u020d\u020e\3\2\2\2\u020e\u020f\7I\2\2\u020f"+ + "\u0216\5j\66\2\u0210\u0212\7\60\2\2\u0211\u0213\7=\2\2\u0212\u0211\3\2"+ + "\2\2\u0212\u0213\3\2\2\2\u0213\u0214\3\2\2\2\u0214\u0216\7>\2\2\u0215"+ + "\u01e8\3\2\2\2\u0215\u01f0\3\2\2\2\u0215\u01ff\3\2\2\2\u0215\u0207\3\2"+ + "\2\2\u0215\u020c\3\2\2\2\u0215\u0210\3\2\2\2\u0216\65\3\2\2\2\u0217\u0218"+ + "\7\64\2\2\u0218\u0219\58\35\2\u0219\67\3\2\2\2\u021a\u021c\5j\66\2\u021b"+ + "\u021d\5:\36\2\u021c\u021b\3\2\2\2\u021c\u021d\3\2\2\2\u021d9\3\2\2\2"+ + "\u021e\u021f\7\35\2\2\u021f\u0225\5j\66\2\u0220\u0221\7^\2\2\u0221\u0222"+ + "\5j\66\2\u0222\u0223\7e\2\2\u0223\u0225\3\2\2\2\u0224\u021e\3\2\2\2\u0224"+ + "\u0220\3\2\2\2\u0225;\3\2\2\2\u0226\u0227\b\37\1\2\u0227\u022b\5> \2\u0228"+ + "\u0229\t\7\2\2\u0229\u022b\5<\37\6\u022a\u0226\3\2\2\2\u022a\u0228\3\2"+ + "\2\2\u022b\u0238\3\2\2\2\u022c\u022d\f\5\2\2\u022d\u022e\t\f\2\2\u022e"+ + 
"\u0237\5<\37\6\u022f\u0230\f\4\2\2\u0230\u0231\t\7\2\2\u0231\u0237\5<"+ + "\37\5\u0232\u0233\f\3\2\2\u0233\u0234\5T+\2\u0234\u0235\5<\37\4\u0235"+ + "\u0237\3\2\2\2\u0236\u022c\3\2\2\2\u0236\u022f\3\2\2\2\u0236\u0232\3\2"+ + "\2\2\u0237\u023a\3\2\2\2\u0238\u0236\3\2\2\2\u0238\u0239\3\2\2\2\u0239"+ + "=\3\2\2\2\u023a\u0238\3\2\2\2\u023b\u023c\b \1\2\u023c\u0251\5B\"\2\u023d"+ + "\u0251\5H%\2\u023e\u0251\5@!\2\u023f\u0251\5R*\2\u0240\u0241\5^\60\2\u0241"+ + "\u0242\7t\2\2\u0242\u0244\3\2\2\2\u0243\u0240\3\2\2\2\u0243\u0244\3\2"+ + "\2\2\u0244\u0245\3\2\2\2\u0245\u0251\7o\2\2\u0246\u0251\5L\'\2\u0247\u0248"+ + "\7\3\2\2\u0248\u0249\5\b\5\2\u0249\u024a\7\4\2\2\u024a\u0251\3\2\2\2\u024b"+ + "\u0251\5^\60\2\u024c\u024d\7\3\2\2\u024d\u024e\5,\27\2\u024e\u024f\7\4"+ + "\2\2\u024f\u0251\3\2\2\2\u0250\u023b\3\2\2\2\u0250\u023d\3\2\2\2\u0250"+ + "\u023e\3\2\2\2\u0250\u023f\3\2\2\2\u0250\u0243\3\2\2\2\u0250\u0246\3\2"+ + "\2\2\u0250\u0247\3\2\2\2\u0250\u024b\3\2\2\2\u0250\u024c\3\2\2\2\u0251"+ + "\u0257\3\2\2\2\u0252\u0253\f\13\2\2\u0253\u0254\7r\2\2\u0254\u0256\5\\"+ + "/\2\u0255\u0252\3\2\2\2\u0256\u0259\3\2\2\2\u0257\u0255\3\2\2\2\u0257"+ + "\u0258\3\2\2\2\u0258?\3\2\2\2\u0259\u0257\3\2\2\2\u025a\u025d\7\25\2\2"+ + "\u025b\u025c\7\3\2\2\u025c\u025e\7\4\2\2\u025d\u025b\3\2\2\2\u025d\u025e"+ + "\3\2\2\2\u025e\u0268\3\2\2\2\u025f\u0265\7\26\2\2\u0260\u0262\7\3\2\2"+ + "\u0261\u0263\7w\2\2\u0262\u0261\3\2\2\2\u0262\u0263\3\2\2\2\u0263\u0264"+ + "\3\2\2\2\u0264\u0266\7\4\2\2\u0265\u0260\3\2\2\2\u0265\u0266\3\2\2\2\u0266"+ + "\u0268\3\2\2\2\u0267\u025a\3\2\2\2\u0267\u025f\3\2\2\2\u0268A\3\2\2\2"+ + "\u0269\u0274\5D#\2\u026a\u026b\7_\2\2\u026b\u026c\5D#\2\u026c\u026d\7"+ + "e\2\2\u026d\u0274\3\2\2\2\u026e\u0274\5F$\2\u026f\u0270\7_\2\2\u0270\u0271"+ + "\5F$\2\u0271\u0272\7e\2\2\u0272\u0274\3\2\2\2\u0273\u0269\3\2\2\2\u0273"+ + "\u026a\3\2\2\2\u0273\u026e\3\2\2\2\u0273\u026f\3\2\2\2\u0274C\3\2\2\2"+ + "\u0275\u0276\7\20\2\2\u0276\u0277\7\3\2\2\u0277\u0278\5,\27\2\u0278\u0279"+ + "\7\f\2\2\u0279\u027a\5\\/\2\u027a\u027b\7\4\2\2\u027bE\3\2\2\2\u027c\u027d"+ + "\7\24\2\2\u027d\u027e\7\3\2\2\u027e\u027f\5,\27\2\u027f\u0280\7\5\2\2"+ + "\u0280\u0281\5\\/\2\u0281\u0282\7\4\2\2\u0282G\3\2\2\2\u0283\u0289\5J"+ + "&\2\u0284\u0285\7_\2\2\u0285\u0286\5J&\2\u0286\u0287\7e\2\2\u0287\u0289"+ + "\3\2\2\2\u0288\u0283\3\2\2\2\u0288\u0284\3\2\2\2\u0289I\3\2\2\2\u028a"+ + "\u028b\7!\2\2\u028b\u028c\7\3\2\2\u028c\u028d\5`\61\2\u028d\u028e\7%\2"+ + "\2\u028e\u028f\5<\37\2\u028f\u0290\7\4\2\2\u0290K\3\2\2\2\u0291\u0297"+ + "\5N(\2\u0292\u0293\7_\2\2\u0293\u0294\5N(\2\u0294\u0295\7e\2\2\u0295\u0297"+ + "\3\2\2\2\u0296\u0291\3\2\2\2\u0296\u0292\3\2\2\2\u0297M\3\2\2\2\u0298"+ + "\u0299\5P)\2\u0299\u02a5\7\3\2\2\u029a\u029c\5\36\20\2\u029b\u029a\3\2"+ + "\2\2\u029b\u029c\3\2\2\2\u029c\u029d\3\2\2\2\u029d\u02a2\5,\27\2\u029e"+ + "\u029f\7\5\2\2\u029f\u02a1\5,\27\2\u02a0\u029e\3\2\2\2\u02a1\u02a4\3\2"+ + "\2\2\u02a2\u02a0\3\2\2\2\u02a2\u02a3\3\2\2\2\u02a3\u02a6\3\2\2\2\u02a4"+ + "\u02a2\3\2\2\2\u02a5\u029b\3\2\2\2\u02a5\u02a6\3\2\2\2\u02a6\u02a7\3\2"+ + "\2\2\u02a7\u02a8\7\4\2\2\u02a8O\3\2\2\2\u02a9\u02ad\7\63\2\2\u02aa\u02ad"+ + "\7H\2\2\u02ab\u02ad\5`\61\2\u02ac\u02a9\3\2\2\2\u02ac\u02aa\3\2\2\2\u02ac"+ + "\u02ab\3\2\2\2\u02adQ\3\2\2\2\u02ae\u02c9\7>\2\2\u02af\u02c9\5X-\2\u02b0"+ + "\u02c9\5h\65\2\u02b1\u02c9\5V,\2\u02b2\u02b4\7v\2\2\u02b3\u02b2\3\2\2"+ + "\2\u02b4\u02b5\3\2\2\2\u02b5\u02b3\3\2\2\2\u02b5\u02b6\3\2\2\2\u02b6\u02c9"+ + "\3\2\2\2\u02b7\u02c9\7u\2\2\u02b8\u02b9\7a\2\2\u02b9\u02ba\5j\66\2\u02ba"+ + 
"\u02bb\7e\2\2\u02bb\u02c9\3\2\2\2\u02bc\u02bd\7b\2\2\u02bd\u02be\5j\66"+ + "\2\u02be\u02bf\7e\2\2\u02bf\u02c9\3\2\2\2\u02c0\u02c1\7c\2\2\u02c1\u02c2"+ + "\5j\66\2\u02c2\u02c3\7e\2\2\u02c3\u02c9\3\2\2\2\u02c4\u02c5\7d\2\2\u02c5"+ + "\u02c6\5j\66\2\u02c6\u02c7\7e\2\2\u02c7\u02c9\3\2\2\2\u02c8\u02ae\3\2"+ + "\2\2\u02c8\u02af\3\2\2\2\u02c8\u02b0\3\2\2\2\u02c8\u02b1\3\2\2\2\u02c8"+ + "\u02b3\3\2\2\2\u02c8\u02b7\3\2\2\2\u02c8\u02b8\3\2\2\2\u02c8\u02bc\3\2"+ + "\2\2\u02c8\u02c0\3\2\2\2\u02c8\u02c4\3\2\2\2\u02c9S\3\2\2\2\u02ca\u02cb"+ + "\t\r\2\2\u02cbU\3\2\2\2\u02cc\u02cd\t\16\2\2\u02cdW\3\2\2\2\u02ce\u02d0"+ + "\7/\2\2\u02cf\u02d1\t\7\2\2\u02d0\u02cf\3\2\2\2\u02d0\u02d1\3\2\2\2\u02d1"+ + "\u02d4\3\2\2\2\u02d2\u02d5\5h\65\2\u02d3\u02d5\5j\66\2\u02d4\u02d2\3\2"+ + "\2\2\u02d4\u02d3\3\2\2\2\u02d5\u02d6\3\2\2\2\u02d6\u02d9\5Z.\2\u02d7\u02d8"+ + "\7U\2\2\u02d8\u02da\5Z.\2\u02d9\u02d7\3\2\2\2\u02d9\u02da\3\2\2\2\u02da"+ + "Y\3\2\2\2\u02db\u02dc\t\17\2\2\u02dc[\3\2\2\2\u02dd\u02de\5`\61\2\u02de"+ + "]\3\2\2\2\u02df\u02e0\5`\61\2\u02e0\u02e1\7t\2\2\u02e1\u02e3\3\2\2\2\u02e2"+ + "\u02df\3\2\2\2\u02e3\u02e6\3\2\2\2\u02e4\u02e2\3\2\2\2\u02e4\u02e5\3\2"+ + "\2\2\u02e5\u02e7\3\2\2\2\u02e6\u02e4\3\2\2\2\u02e7\u02e8\5`\61\2\u02e8"+ + "_\3\2\2\2\u02e9\u02ec\5d\63\2\u02ea\u02ec\5f\64\2\u02eb\u02e9\3\2\2\2"+ + "\u02eb\u02ea\3\2\2\2\u02eca\3\2\2\2\u02ed\u02ee\5`\61\2\u02ee\u02ef\7"+ + "\6\2\2\u02ef\u02f1\3\2\2\2\u02f0\u02ed\3\2\2\2\u02f0\u02f1\3\2\2\2\u02f1"+ + "\u02f2\3\2\2\2\u02f2\u02fa\7{\2\2\u02f3\u02f4\5`\61\2\u02f4\u02f5\7\6"+ + "\2\2\u02f5\u02f7\3\2\2\2\u02f6\u02f3\3\2\2\2\u02f6\u02f7\3\2\2\2\u02f7"+ + "\u02f8\3\2\2\2\u02f8\u02fa\5`\61\2\u02f9\u02f0\3\2\2\2\u02f9\u02f6\3\2"+ + "\2\2\u02fac\3\2\2\2\u02fb\u02fe\7|\2\2\u02fc\u02fe\7}\2\2\u02fd\u02fb"+ + "\3\2\2\2\u02fd\u02fc\3\2\2\2\u02fee\3\2\2\2\u02ff\u0303\7y\2\2\u0300\u0303"+ + "\5l\67\2\u0301\u0303\7z\2\2\u0302\u02ff\3\2\2\2\u0302\u0300\3\2\2\2\u0302"+ + "\u0301\3\2\2\2\u0303g\3\2\2\2\u0304\u0307\7x\2\2\u0305\u0307\7w\2\2\u0306"+ + "\u0304\3\2\2\2\u0306\u0305\3\2\2\2\u0307i\3\2\2\2\u0308\u0309\t\20\2\2"+ + "\u0309k\3\2\2\2\u030a\u030b\t\21\2\2\u030bm\3\2\2\2k}\177\u0083\u008c"+ "\u008e\u0092\u0099\u00a0\u00a5\u00aa\u00b2\u00b6\u00be\u00c1\u00c7\u00cc"+ "\u00cf\u00d4\u00d7\u00d9\u00e1\u00e4\u00f0\u00f3\u00f6\u00fd\u0104\u0108"+ "\u010c\u0110\u0117\u011b\u011f\u0124\u0128\u0130\u0134\u013b\u0146\u0149"+ "\u014d\u0159\u015c\u0162\u0169\u0170\u0173\u0177\u017b\u017f\u0181\u018c"+ "\u0191\u0195\u0198\u019e\u01a1\u01a7\u01aa\u01ac\u01cf\u01d7\u01d9\u01e0"+ "\u01e5\u01e8\u01f0\u01f9\u01ff\u0207\u020c\u0212\u0215\u021c\u0224\u022a"+ - "\u0239\u023b\u0245\u0252\u0257\u025c\u025f\u0261\u026d\u0282\u0290\u0295"+ - "\u029c\u029f\u02a6\u02af\u02c2\u02ca\u02ce\u02d3\u02de\u02e5\u02ea\u02f0"+ - "\u02f3\u02f7\u02fc\u0300"; + "\u0236\u0238\u0243\u0250\u0257\u025d\u0262\u0265\u0267\u0273\u0288\u0296"+ + "\u029b\u02a2\u02a5\u02ac\u02b5\u02c8\u02d0\u02d4\u02d9\u02e4\u02eb\u02f0"+ + "\u02f6\u02f9\u02fd\u0302\u0306"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 8e8b9e5ab9ed7..56310aa66eb7c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -330,13 +330,6 @@ interface SqlBaseVisitor 
extends ParseTreeVisitor { * @return the visitor result */ T visitComparison(SqlBaseParser.ComparisonContext ctx); - /** - * Visit a parse tree produced by the {@code castOperatorExpression} - * labeled alternative in {@link SqlBaseParser#valueExpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx); /** * Visit a parse tree produced by the {@code arithmeticBinary} * labeled alternative in {@link SqlBaseParser#valueExpression}. @@ -351,6 +344,13 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitArithmeticUnary(SqlBaseParser.ArithmeticUnaryContext ctx); + /** + * Visit a parse tree produced by the {@code dereference} + * labeled alternative in {@link SqlBaseParser#primaryExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDereference(SqlBaseParser.DereferenceContext ctx); /** * Visit a parse tree produced by the {@code cast} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -359,26 +359,26 @@ interface SqlBaseVisitor extends ParseTreeVisitor { */ T visitCast(SqlBaseParser.CastContext ctx); /** - * Visit a parse tree produced by the {@code extract} + * Visit a parse tree produced by the {@code constantDefault} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitExtract(SqlBaseParser.ExtractContext ctx); + T visitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx); /** - * Visit a parse tree produced by the {@code currentDateTimeFunction} + * Visit a parse tree produced by the {@code extract} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); + T visitExtract(SqlBaseParser.ExtractContext ctx); /** - * Visit a parse tree produced by the {@code constantDefault} + * Visit a parse tree produced by the {@code parenthesizedExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitConstantDefault(SqlBaseParser.ConstantDefaultContext ctx); + T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); /** * Visit a parse tree produced by the {@code star} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -387,33 +387,33 @@ interface SqlBaseVisitor extends ParseTreeVisitor { */ T visitStar(SqlBaseParser.StarContext ctx); /** - * Visit a parse tree produced by the {@code function} + * Visit a parse tree produced by the {@code castOperatorExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitFunction(SqlBaseParser.FunctionContext ctx); + T visitCastOperatorExpression(SqlBaseParser.CastOperatorExpressionContext ctx); /** - * Visit a parse tree produced by the {@code subqueryExpression} + * Visit a parse tree produced by the {@code function} * labeled alternative in {@link SqlBaseParser#primaryExpression}. 
* @param ctx the parse tree * @return the visitor result */ - T visitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx); + T visitFunction(SqlBaseParser.FunctionContext ctx); /** - * Visit a parse tree produced by the {@code dereference} + * Visit a parse tree produced by the {@code currentDateTimeFunction} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitDereference(SqlBaseParser.DereferenceContext ctx); + T visitCurrentDateTimeFunction(SqlBaseParser.CurrentDateTimeFunctionContext ctx); /** - * Visit a parse tree produced by the {@code parenthesizedExpression} + * Visit a parse tree produced by the {@code subqueryExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); + T visitSubqueryExpression(SqlBaseParser.SubqueryExpressionContext ctx); /** * Visit a parse tree produced by {@link SqlBaseParser#builtinDateTimeFunction}. * @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java index 3ffcedae0a6ac..112aabf55dac5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -282,6 +282,19 @@ public void testCastWithInvalidDataType() { assertEquals("line 1:12: Does not recognize type [InVaLiD]", ex.getMessage()); } + public void testCastOperatorPrecedence() { + Expression expr = parser.createExpression("(10* 2::long)"); + assertEquals(Mul.class, expr.getClass()); + Mul mul = (Mul) expr; + assertEquals(DataType.LONG, mul.dataType()); + assertEquals(DataType.INTEGER, mul.left().dataType()); + assertEquals(Cast.class, mul.right().getClass()); + Cast cast = (Cast) mul.right(); + assertEquals(DataType.INTEGER, cast.from()); + assertEquals(DataType.LONG, cast.to()); + assertEquals(DataType.LONG, cast.dataType()); + } + public void testCastOperatorWithUnquotedDataType() { Expression expr = parser.createExpression("(10* 2)::long"); assertEquals(Cast.class, expr.getClass()); From 50e47e30272b6735d9ecfc5ce81cdecc6730dc35 Mon Sep 17 00:00:00 2001 From: Karel Minarik Date: Sat, 30 Mar 2019 15:26:05 +0100 Subject: [PATCH 43/63] [DOCS] Add documentation for the Go client (#40448) Related: * https://github.com/elastic/elasticsearch/pull/39379 * https://github.com/elastic/docs/pull/644 --- docs/go/index.asciidoc | 74 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 71 insertions(+), 3 deletions(-) diff --git a/docs/go/index.asciidoc b/docs/go/index.asciidoc index 9cde6d128de41..f8977dea91950 100644 --- a/docs/go/index.asciidoc +++ b/docs/go/index.asciidoc @@ -2,7 +2,75 @@ == Overview -* https://github.com/elastic/go-elasticsearch -* https://godoc.org/github.com/elastic/go-elasticsearch -* https://github.com/elastic/go-elasticsearch/tree/master/_examples +An official Go client for Elasticsearch. +Full documentation is hosted at https://github.com/elastic/go-elasticsearch[GitHub] +and https://godoc.org/github.com/elastic/go-elasticsearch[GoDoc] +-- this page provides only an overview. + +.Work In Progress +************************************************************************************ +The client is currently available as a public preview. 
We encourage you to try the +package in your projects, but please be aware that the public API may change. +************************************************************************************ + +=== Elasticsearch Version Compatibility + +The client major versions correspond to the Elasticsearch major versions: +to connect to Elasticsearch `6.x`, use a `6.x` version of the client, +to connect to Elasticsearch `7.x`, use a `7.x` version of the client, and so on. + +The `master` branch of the client is compatible with the `master` branch of Elasticsearch. + +=== Installation + +Add the package to your `go.mod` file: + +[source,text] +------------------------------------ +require github.com/elastic/go-elasticsearch {VERSION} +------------------------------------ + +=== Usage + +[source,go] +------------------------------------ +package main + +import ( + "log" + + "github.com/elastic/go-elasticsearch" +) + +func main() { + es, _ := elasticsearch.NewDefaultClient() + log.Println(es.Info()) +} +------------------------------------ + +[NOTE] +Please have a look at the collection of comprehensive examples in the repository +at https://github.com/elastic/go-elasticsearch/tree/master/_examples. + +== Resources + +* https://github.com/elastic/go-elasticsearch[Source Code] +* https://godoc.org/github.com/elastic/go-elasticsearch[API Documentation] +* https://github.com/elastic/go-elasticsearch/tree/master/_examples[Examples and Recipes] + +== License + +Copyright 2019-present Elasticsearch + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. From def37a6f4afbeaf7defb8625d2c5681caf786b50 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Sat, 30 Mar 2019 10:56:23 -0400 Subject: [PATCH 44/63] Add information about the default sort mode (#40657) --- docs/reference/search/request/sort.asciidoc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 4242925d9a8d1..c12ec3a679a72 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -82,6 +82,10 @@ to. The `mode` option can have the following values: `median`:: Use the median of all values as sort value. Only applicable for number based array fields. +The default sort mode in the ascending sort order is `min` -- the lowest value +is picked. The default sort mode in the descending order is `max` -- +the highest value is picked. + ===== Sort mode example usage In the example below the field price has multiple prices per document. From c23580f477ffc61c5701e14a91006db7bf21a8d4 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Sat, 30 Mar 2019 16:56:44 +0100 Subject: [PATCH 45/63] SQL: [Tests] Enable integration tests for fixed issues (#40664) Enable some Ignored integration tests for issues/features that have already been resolved/implemented. 
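For illustration, one of the re-enabled specs below runs `SELECT PI() AS pi FROM test_emp WHERE PI()=5 LIMIT 3`. A minimal JDBC-style sketch of executing such a spec query against the SQL endpoint might look like this; the connection URL and the presence of a `test_emp` index are assumptions of this example, not part of the change:

```
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical sketch: run one of the re-enabled spec queries through the
// Elasticsearch SQL JDBC driver. The URL and the test_emp index are assumed.
public class SqlSpecQuerySketch {
    public static void main(String[] args) throws Exception {
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200");
             Statement st = con.createStatement();
             // An empty result set is expected here, since PI() never equals 5.
             ResultSet rs = st.executeQuery("SELECT PI() AS pi FROM test_emp WHERE PI()=5 LIMIT 3")) {
            while (rs.next()) {
                System.out.println(rs.getDouble("pi"));
            }
        }
    }
}
```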
--- x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec | 9 +++------ x-pack/plugin/sql/qa/src/main/resources/select.sql-spec | 6 ++---- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec index 806e92b1d88d2..e24297f7fa9b3 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec @@ -51,18 +51,15 @@ groupByModScalar SELECT (emp_no % 3) + 1 AS e FROM test_emp GROUP BY e ORDER BY e; // group by nested functions with no alias -//https://github.com/elastic/elasticsearch/issues/40239 -groupByTruncate-Ignore +groupByTruncate SELECT CAST(TRUNCATE(EXTRACT(YEAR FROM "birth_date")) AS INTEGER) FROM test_emp GROUP BY CAST(TRUNCATE(EXTRACT(YEAR FROM "birth_date")) AS INTEGER) ORDER BY CAST(TRUNCATE(EXTRACT(YEAR FROM "birth_date")) AS INTEGER); -//https://github.com/elastic/elasticsearch/issues/40239 -groupByRound-Ignore +groupByRound SELECT CAST(ROUND(EXTRACT(YEAR FROM "birth_date")) AS INTEGER) FROM test_emp GROUP BY CAST(ROUND(EXTRACT(YEAR FROM "birth_date")) AS INTEGER) ORDER BY CAST(ROUND(EXTRACT(YEAR FROM "birth_date")) AS INTEGER); groupByAtan2 SELECT ATAN2(YEAR("birth_date"), 5) FROM test_emp GROUP BY ATAN2(YEAR("birth_date"), 5) ORDER BY ATAN2(YEAR("birth_date"), 5); groupByPower SELECT POWER(YEAR("birth_date"), 2) FROM test_emp GROUP BY POWER(YEAR("birth_date"), 2) ORDER BY POWER(YEAR("birth_date"), 2); -//https://github.com/elastic/elasticsearch/issues/40239 -groupByPowerWithCast-Ignore +groupByPowerWithCast SELECT CAST(POWER(YEAR("birth_date"), 2) AS DOUBLE) FROM test_emp GROUP BY CAST(POWER(YEAR("birth_date"), 2) AS DOUBLE) ORDER BY CAST(POWER(YEAR("birth_date"), 2) AS DOUBLE); groupByConcat SELECT LEFT(CONCAT("first_name", "last_name"), 3) FROM test_emp GROUP BY LEFT(CONCAT("first_name", "last_name"), 3) ORDER BY LEFT(CONCAT("first_name", "last_name"), 3) LIMIT 15; diff --git a/x-pack/plugin/sql/qa/src/main/resources/select.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/select.sql-spec index 82bbbf12e961e..9f9731efcc5b9 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/select.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/select.sql-spec @@ -108,9 +108,7 @@ selectMathPIFromIndexWithWhereEvaluatingToTrue SELECT PI() AS pi FROM test_emp WHERE ROUND(PI(),2)=3.14; selectMathPIFromIndexWithWhereEvaluatingToTrueAndWithLimit SELECT PI() AS pi FROM test_emp WHERE ROUND(PI(),2)=3.14 LIMIT 3; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/35980 -selectMathPIFromIndexWithWhereEvaluatingToFalse-Ignore +selectMathPIFromIndexWithWhereEvaluatingToFalse SELECT PI() AS pi FROM test_emp WHERE PI()=5; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/35980 -selectMathPIFromIndexWithWhereEvaluatingToFalseAndWithLimit-Ignore +selectMathPIFromIndexWithWhereEvaluatingToFalseAndWithLimit SELECT PI() AS pi FROM test_emp WHERE PI()=5 LIMIT 3; From b3341da0779673c106db526ec58c4449bda59dc2 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 31 Mar 2019 19:42:22 -0400 Subject: [PATCH 46/63] Fix bug in detecting use of bundled JDK on macOS This commit fixes a bug in detecting the use of the bundled JDK on macOS. This bug arose because the path of Java home is different on macOS. 
--- .../main/java/org/elasticsearch/monitor/jvm/JvmInfo.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java index e8380a77962d4..b8eda3303377d 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmInfo.java @@ -19,6 +19,7 @@ package org.elasticsearch.monitor.jvm; +import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SuppressForbidden; @@ -171,7 +172,11 @@ private static boolean usingBundledJdk() { */ final String javaHome = System.getProperty("java.home"); final String userDir = System.getProperty("user.dir"); - return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk").toAbsolutePath()); + if (Constants.MAC_OS_X) { + return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk/Contents/Home").toAbsolutePath()); + } else { + return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk").toAbsolutePath()); + } } public static JvmInfo jvmInfo() { From c65291761f6a9c77475ab22994fff523781a304a Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Mon, 1 Apr 2019 11:21:59 +1100 Subject: [PATCH 47/63] Add build utility to check cluster health over ssl (#40573) By default, in integ tests we wait for the standalone cluster to start by using the ant Get task to retrieve the cluster health endpoint. However the ant task has no facilities for customising the trusted CAs for a https resource, so if the integ test cluster has TLS enabled on the http interface (using a custom CA) we need a separate utility for that purpose. Resolves: #38072 --- buildSrc/build.gradle | 1 + .../gradle/http/WaitForHttpResource.java | 233 ++++++++++++++++++ .../gradle/http/WaitForHttpResourceTests.java | 60 +++++ buildSrc/src/test/resources/ca.p12 | Bin 0 -> 1130 bytes buildSrc/src/test/resources/ca.pem | 25 ++ .../reindex-tests-with-security/build.gradle | 57 +---- x-pack/qa/smoke-test-plugins-ssl/build.gradle | 54 +--- 7 files changed, 333 insertions(+), 97 deletions(-) create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java create mode 100644 buildSrc/src/test/java/org/elasticsearch/gradle/http/WaitForHttpResourceTests.java create mode 100644 buildSrc/src/test/resources/ca.p12 create mode 100644 buildSrc/src/test/resources/ca.pem diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index be54b2c68f639..9d25532d4cef1 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -245,6 +245,7 @@ if (project != rootProject) { forbiddenPatterns { exclude '**/*.wav' + exclude '**/*.p12' // the file that actually defines nocommit exclude '**/ForbiddenPatternsTask.java' } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java b/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java new file mode 100644 index 0000000000000..a8680ef13dda0 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java @@ -0,0 +1,233 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.http; + +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; + +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.KeyManager; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.SecureRandom; +import java.security.cert.Certificate; +import java.security.cert.CertificateFactory; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collections; +import java.util.Enumeration; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +/** + * A utility to wait for a specific HTTP resource to be available, optionally with customized TLS trusted CAs. + * This is logically similar to using the Ant Get task to retrieve a resource, but with the difference that it can + * access resources that do not use the JRE's default trusted CAs. + */ +public class WaitForHttpResource { + + private static final Logger logger = Logging.getLogger(WaitForHttpResource.class); + + private Set validResponseCodes = Collections.singleton(200); + private URL url; + private Set certificateAuthorities; + private File trustStoreFile; + private String trustStorePassword; + private String username; + private String password; + + public WaitForHttpResource(String protocol, String host, int numberOfNodes) throws MalformedURLException { + this(new URL(protocol + "://" + host + "/_cluster/health?wait_for_nodes=>=" + numberOfNodes + "&wait_for_status=yellow")); + } + + public WaitForHttpResource(URL url) { + this.url = url; + } + + public void setValidResponseCodes(int... validResponseCodes) { + this.validResponseCodes = new HashSet<>(validResponseCodes.length); + for (int rc : validResponseCodes) { + this.validResponseCodes.add(rc); + } + } + + public void setCertificateAuthorities(File... 
certificateAuthorities) { + this.certificateAuthorities = new HashSet<>(Arrays.asList(certificateAuthorities)); + } + + public void setTrustStoreFile(File trustStoreFile) { + this.trustStoreFile = trustStoreFile; + } + + public void setTrustStorePassword(String trustStorePassword) { + this.trustStorePassword = trustStorePassword; + } + + public void setUsername(String username) { + this.username = username; + } + + public void setPassword(String password) { + this.password = password; + } + + public boolean wait(int durationInMs) throws GeneralSecurityException, InterruptedException, IOException { + final long waitUntil = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(durationInMs); + final long sleep = Long.max(durationInMs / 10, 100); + + final SSLContext ssl; + final KeyStore trustStore = buildTrustStore(); + if (trustStore != null) { + ssl = createSslContext(trustStore); + } else { + ssl = null; + } + IOException failure = null; + for (; ; ) { + try { + checkResource(ssl); + return true; + } catch (IOException e) { + logger.debug("Failed to access resource [{}]", url, e); + failure = e; + } + if (System.nanoTime() < waitUntil) { + Thread.sleep(sleep); + } else { + logger.error("Failed to access url [{}]", url, failure); + return false; + } + } + } + + protected void checkResource(SSLContext ssl) throws IOException { + try { + final HttpURLConnection connection = buildConnection(ssl); + connection.connect(); + final Integer response = connection.getResponseCode(); + if (validResponseCodes.contains(response)) { + logger.info("Got successful response [{}] from URL [{}]", response, url); + return; + } else { + throw new IOException(response + " " + connection.getResponseMessage()); + } + } catch (IOException e) { + throw e; + } + } + + HttpURLConnection buildConnection(SSLContext ssl) throws IOException { + final HttpURLConnection connection = (HttpURLConnection) this.url.openConnection(); + configureSslContext(connection, ssl); + configureBasicAuth(connection); + connection.setRequestMethod("GET"); + return connection; + } + + private void configureSslContext(HttpURLConnection connection, SSLContext ssl) { + if (ssl != null) { + if (connection instanceof HttpsURLConnection) { + ((HttpsURLConnection) connection).setSSLSocketFactory(ssl.getSocketFactory()); + } else { + throw new IllegalStateException("SSL trust has been configured, but [" + url + "] is not a 'https' URL"); + } + } + } + + private void configureBasicAuth(HttpURLConnection connection) { + if (username != null) { + if (password == null) { + throw new IllegalStateException("Basic Auth user [" + username + + "] has been set, but no password has been configured"); + } + connection.setRequestProperty("Authorization", + "Basic " + Base64.getEncoder().encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8))); + } + } + + KeyStore buildTrustStore() throws GeneralSecurityException, IOException { + if (this.certificateAuthorities != null) { + if (trustStoreFile != null) { + throw new IllegalStateException("Cannot specify both truststore and CAs"); + } + return buildTrustStoreFromCA(); + } else if (trustStoreFile != null) { + return buildTrustStoreFromFile(); + } else { + return null; + } + } + + private KeyStore buildTrustStoreFromFile() throws GeneralSecurityException, IOException { + KeyStore keyStore = KeyStore.getInstance(trustStoreFile.getName().endsWith(".jks") ? "JKS" : "PKCS12"); + try (InputStream input = new FileInputStream(trustStoreFile)) { + keyStore.load(input, trustStorePassword == null ? 
null : trustStorePassword.toCharArray()); + } + return keyStore; + } + + private KeyStore buildTrustStoreFromCA() throws GeneralSecurityException, IOException { + final KeyStore store = KeyStore.getInstance(KeyStore.getDefaultType()); + store.load(null, null); + final CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); + int counter = 0; + for (File ca : certificateAuthorities) { + try (InputStream input = new FileInputStream(ca)) { + for (Certificate certificate : certFactory.generateCertificates(input)) { + store.setCertificateEntry("cert-" + counter, certificate); + counter++; + } + } + } + return store; + } + + private SSLContext createSslContext(KeyStore trustStore) throws GeneralSecurityException { + checkForTrustEntry(trustStore); + TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(trustStore); + SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); + sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom()); + return sslContext; + } + + private void checkForTrustEntry(KeyStore trustStore) throws KeyStoreException { + Enumeration enumeration = trustStore.aliases(); + while (enumeration.hasMoreElements()) { + if (trustStore.isCertificateEntry(enumeration.nextElement())) { + // found trusted cert entry + return; + } + } + throw new IllegalStateException("Trust-store does not contain any trusted certificate entries"); + } +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/http/WaitForHttpResourceTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/http/WaitForHttpResourceTests.java new file mode 100644 index 0000000000000..67bae367c6f9f --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/http/WaitForHttpResourceTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gradle.http; + +import org.elasticsearch.gradle.test.GradleUnitTestCase; + +import java.io.File; +import java.net.URL; +import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.X509Certificate; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.notNullValue; + +public class WaitForHttpResourceTests extends GradleUnitTestCase { + + public void testBuildTrustStoreFromFile() throws Exception { + final WaitForHttpResource http = new WaitForHttpResource(new URL("https://localhost/")); + final URL ca = getClass().getResource("/ca.p12"); + assertThat(ca, notNullValue()); + http.setTrustStoreFile(new File(ca.getPath())); + http.setTrustStorePassword("password"); + final KeyStore store = http.buildTrustStore(); + final Certificate certificate = store.getCertificate("ca"); + assertThat(certificate, notNullValue()); + assertThat(certificate, instanceOf(X509Certificate.class)); + assertThat(((X509Certificate)certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA")); + } + + public void testBuildTrustStoreFromCA() throws Exception { + final WaitForHttpResource http = new WaitForHttpResource(new URL("https://localhost/")); + final URL ca = getClass().getResource("/ca.pem"); + assertThat(ca, notNullValue()); + http.setCertificateAuthorities(new File(ca.getPath())); + final KeyStore store = http.buildTrustStore(); + final Certificate certificate = store.getCertificate("cert-0"); + assertThat(certificate, notNullValue()); + assertThat(certificate, instanceOf(X509Certificate.class)); + assertThat(((X509Certificate)certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA")); + } +} diff --git a/buildSrc/src/test/resources/ca.p12 b/buildSrc/src/test/resources/ca.p12 new file mode 100644 index 0000000000000000000000000000000000000000..cc44494515b9f415b10e5a1680a351004f36336e GIT binary patch literal 1130 zcmV-w1eN>`YKzq*P z3y>M9*-ScP?1G-yVrojUCy_ALnwg}ca60EThMM9YrH`bM(eT@AOI>a^M^U!^s2%az zrPQgnSOrO6=k;|gH>|m$R5&%)w(cY^zJv(c$#cHvXZYU5thBcru&BI=EIUFjS!R2>kujzOGMgd?)`w3RP8sF^jF`%@< zC2leR<(U!v>Vpdc8?^P?y(?yX7~Q-DK$zzh@9scpmNili{S9kae_0$UviHYUHkwLJ zmB?Aegqu!`e>iOJO$V(GPY3cW;(OU{aEwqk`_=&@JuX^W5mru5m3(VY7Tb;3A6u@` zCbt^A)%YsHuSwmuiPei=Mn}Nol$aR5>2V)0T03^>96pBgXxA=&LN&6=1Od0obZ$LC z9k%l=Yfq)jlH1>WEbj?#x$zugDT5=*d4VxyyaA_dOsL&leFbs~*rNJ?oM0wYZkr&w z6j~LIZ&XQ}1@b|m`}>cHOmLu&P?_lmlG+=S{-s-j%`4*sbZIPyxx&7YF8hH3n-8U( zw9AX*Box9=@bd~T;$T+ore@}|0#G=pwF&V8xg39DzX5|+&qEZDXfEu28LCl=;Ex@x zrafigSLJn#r{=ZBPV>`7Zx~&I7#)yO^EQGV0-p);N$^Vb$s3AC5D4`R$E{QTY@eJg zf<>EFC9d=ndoy%S{?q(;3d%t^IMHcoMyW$GwT5(SvR4$Zcn4QvX{R)AK1Sd(^PrI; zG_*Ji{3HO*z$bB>JOHvq$R%s=Qn9y6iu2C+7u7_TLNNrZog6HzS_KT!KkHE$65maB z-4#<)UeY2()Ai>Rv!54{y;tjU*s1ZNG~ z3#wsfdzy0zT5@hdR#AExpO_Hew~Mc!cZaYpHoW+dT!HPXW4RrM9RS}K&*K`=frAutIB1uG5%0vZJX1QZ{H wm_-g$!e*DmGljOO=GL*I@+JfnfnL+d3d7|K`}9pm4!>XT-tEOT0s{etpmUoKWB>pF literal 0 HcmV?d00001 diff --git a/buildSrc/src/test/resources/ca.pem b/buildSrc/src/test/resources/ca.pem new file mode 100644 index 0000000000000..8dda1767e4838 --- /dev/null +++ b/buildSrc/src/test/resources/ca.pem @@ -0,0 +1,25 @@ +Bag Attributes + friendlyName: ca + localKeyID: 54 69 6D 65 20 31 35 35 33 37 34 33 38 39 30 38 33 35 +subject=/CN=Elastic Certificate Tool Autogenerated CA +issuer=/CN=Elastic Certificate Tool Autogenerated CA +-----BEGIN CERTIFICATE----- 
+MIIDSjCCAjKgAwIBAgIVAMQMmDRcXfXLaTp6ep1H8rC3tOrwMA0GCSqGSIb3DQEB +CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu +ZXJhdGVkIENBMB4XDTE5MDMyODAzMzEyNloXDTIyMDMyNzAzMzEyNlowNDEyMDAG +A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDT73N6JZeBPyzahc0aNcra +BpUROVGB9wXQqf8JeU4GtH+1qfqUKYKUJTe/DZWc+5Qz1WAKGZEvBySAlgbuncuq +VpLzWxpEui1vRW8JB3gjZgeY3vfErrEWWr95YM0e8rWu4AoAchzqsrG0/+po2eui +cN+8hI6jRKiBv/ZeQqja6KZ8y4Wt4VaNVL53+I7+eWA/aposu6/piUg2wZ/FNhVK +hypcJwDdp3fQaugtPj3y76303jTRgutgd3rtWFuy3MCDLfs3mSQUjO10s93zwLdC +XokyIywijS5CpO8mEuDRu9rb5J1DzwUpUfk+GMObb6rHjFKzSqnM3s+nasypQQ9L +AgMBAAGjUzBRMB0GA1UdDgQWBBQZEW88R95zSzO2tLseEWgI7ugvLzAfBgNVHSME +GDAWgBQZEW88R95zSzO2tLseEWgI7ugvLzAPBgNVHRMBAf8EBTADAQH/MA0GCSqG +SIb3DQEBCwUAA4IBAQBEJN0UbL77usVnzIvxKa3GpLBgJQAZtD1ifZppC4w46Bul +1G7Fdc+XMbzZlI4K6cWEdd5dfEssKA8btEtRzdNOqgggBpqrUU0mNlQ+vC22XORU +ykHAu2TsRwoHmuxkd9Et/QyuTFXR4fTiU8rsJuLFOgn+RdEblA0J0gJeIqdWI5Z1 +z13OyZEl6BCQFyrntu2eERxaHEfsJOSBZE4RcecnLNGhIJBXE0Pk4iTiViJF/h7d ++kUUegKx0qewZif2eEZgrz12Vuen9a6bh2i2pNS95vABVVMr8uB+J1BGkNA5YT7J +qtZA2tN//Evng7YDiR+KkB1kvXVZVIi2WPDLD/zu +-----END CERTIFICATE----- diff --git a/x-pack/qa/reindex-tests-with-security/build.gradle b/x-pack/qa/reindex-tests-with-security/build.gradle index 3d415e0e2922a..64e1c61b60717 100644 --- a/x-pack/qa/reindex-tests-with-security/build.gradle +++ b/x-pack/qa/reindex-tests-with-security/build.gradle @@ -1,10 +1,4 @@ -import javax.net.ssl.HttpsURLConnection -import javax.net.ssl.KeyManager -import javax.net.ssl.SSLContext -import javax.net.ssl.TrustManagerFactory -import java.nio.charset.StandardCharsets -import java.security.KeyStore -import java.security.SecureRandom +import org.elasticsearch.gradle.http.WaitForHttpResource apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' @@ -57,48 +51,11 @@ integTestCluster { 'bin/elasticsearch-users', 'useradd', user, '-p', 'x-pack-test-password', '-r', role } waitCondition = { node, ant -> - // Load the CA PKCS#12 file as a truststore - KeyStore ks = KeyStore.getInstance("PKCS12"); - ks.load(caFile.newInputStream(), 'password'.toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(ks); - - // Configre a SSL context for TLS1.2 using our CA trust manager - SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); - sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom()); - - // Check whether the cluster has started - URL url = new URL("https://${node.httpUri()}/_cluster/health?wait_for_nodes=${numNodes}&wait_for_status=yellow"); - for (int i = 20; i >= 0; i--) { - // we use custom wait logic here for HTTPS - HttpsURLConnection httpURLConnection = null; - try { - logger.info("Trying ${url}"); - httpURLConnection = (HttpsURLConnection) url.openConnection(); - httpURLConnection.setSSLSocketFactory(sslContext.getSocketFactory()); - httpURLConnection.setRequestProperty("Authorization", - "Basic " + Base64.getEncoder().encodeToString("test_admin:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); - httpURLConnection.setRequestMethod("GET"); - httpURLConnection.connect(); - if (httpURLConnection.getResponseCode() == 200) { - logger.info("Cluster has started"); - return true; - } else { - logger.debug("HTTP response was [{}]", httpURLConnection.getResponseCode()); - } - } catch (IOException e) { - if (i == 0) { - logger.error("Failed to call cluster health - " + e) - } - 
logger.debug("Call to [{}] threw an exception", url, e) - } finally { - if (httpURLConnection != null) { - httpURLConnection.disconnect(); - } - } - // did not start, so wait a bit before trying again - Thread.sleep(750L); - } - return false; + WaitForHttpResource http = new WaitForHttpResource("https", node.httpUri(), numNodes) + http.setTrustStoreFile(caFile) + http.setTrustStorePassword("password") + http.setUsername("test_admin") + http.setPassword("x-pack-test-password") + return http.wait(5000) } } diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index 5721815f07856..e88eac3028f3d 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -1,13 +1,6 @@ import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.test.NodeInfo - -import javax.net.ssl.HttpsURLConnection -import javax.net.ssl.KeyManager -import javax.net.ssl.SSLContext -import javax.net.ssl.TrustManagerFactory -import java.nio.charset.StandardCharsets -import java.security.KeyStore -import java.security.SecureRandom +import org.elasticsearch.gradle.http.WaitForHttpResource apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' @@ -86,45 +79,12 @@ integTestCluster { 'bin/elasticsearch-users', 'useradd', 'monitoring_agent', '-p', 'x-pack-test-password', '-r', 'remote_monitoring_agent' waitCondition = { NodeInfo node, AntBuilder ant -> - File tmpFile = new File(node.cwd, 'wait.success') - KeyStore keyStore = KeyStore.getInstance("JKS"); - keyStore.load(clientKeyStore.newInputStream(), 'testclient'.toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(keyStore); - // We don't need a KeyManager as there won't be client auth required so pass an empty array - SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); - sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom()); - for (int i = 0; i < 10; i++) { - // we use custom wait logic here for HTTPS - HttpsURLConnection httpURLConnection = null; - try { - httpURLConnection = (HttpsURLConnection) new URL("https://${node.httpUri()}/_cluster/health?wait_for_nodes=${numNodes}&wait_for_status=yellow").openConnection(); - httpURLConnection.setSSLSocketFactory(sslContext.getSocketFactory()); - httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); - httpURLConnection.setRequestMethod("GET"); - httpURLConnection.connect(); - if (httpURLConnection.getResponseCode() == 200) { - tmpFile.withWriter StandardCharsets.UTF_8.name(), { - it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name())) - } - } - } catch (IOException e) { - if (i == 9) { - logger.error("final attempt of calling cluster health failed", e) - } else { - logger.debug("failed to call cluster health", e) - } - } finally { - if (httpURLConnection != null) { - httpURLConnection.disconnect(); - } - } - - // did not start, so wait a bit before trying again - Thread.sleep(500L); - } - return tmpFile.exists() + WaitForHttpResource http = new WaitForHttpResource("https", node.httpUri(), numNodes) + http.setTrustStoreFile(clientKeyStore) + http.setTrustStorePassword("testclient") + http.setUsername("test_user") + http.setPassword("x-pack-test-password") + return http.wait(5000) } } From 
31503f937b1f7ee2eb621a3968062bc3c4a62be7 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Sun, 31 Mar 2019 22:10:45 -0400
Subject: [PATCH 48/63] Remove mention of soft deletes from getting started
 (#40668)

We no longer need to mention soft deletes in the getting started guide
now that retention leases exist and default to 12h. This commit removes
mention of soft deletes from the getting started guide, to simplify
that content.
---
 docs/reference/ccr/getting-started.asciidoc | 15 +--------------
 1 file changed, 1 insertion(+), 14 deletions(-)

diff --git a/docs/reference/ccr/getting-started.asciidoc b/docs/reference/ccr/getting-started.asciidoc
index a9cd918cf8004..24304fea7642a 100644
--- a/docs/reference/ccr/getting-started.asciidoc
+++ b/docs/reference/ccr/getting-started.asciidoc
@@ -161,13 +161,6 @@ image::images/remote-clusters.jpg["The Remote Clusters page in {kib}"]
 [[ccr-getting-started-leader-index]]
 === Creating a leader index

-Leader indices require a special index setting to ensure that the operations
-that need to be replicated are available when the follower requests them from
-the leader. This setting is used to control how many soft deletes are retained.
-A _soft delete_ occurs whenever a document is deleted or updated. Soft deletes
-can be enabled only on new indices created on or after {es} 6.5.0, and enabled
-by default on new indices created on or after {es} 7.0.0.
-
 In the following example, we will create a leader index in the remote cluster:

 [source,js]
--------------------------------------------------
PUT /server-metrics
{
  "settings" : {
    "index" : {
      "number_of_shards" : 1,
-      "number_of_replicas" : 0,
-      "soft_deletes" : {
-        "retention" : {
-          "operations" : 1024 <1>
-        }
-      }
+      "number_of_replicas" : 0
    }
  },
  "mappings" : {
@@ -214,7 +202,6 @@ PUT /server-metrics
--------------------------------------------------
// CONSOLE
// TEST[continued]
-<1> Sets that up to 1024 soft deletes will be retained.

 [float]
 [[ccr-getting-started-follower-index]]

From 3fcb5a80a3624da893f78f130a5980da2ef0b469 Mon Sep 17 00:00:00 2001
From: Tim Vernum
Date: Mon, 1 Apr 2019 16:07:38 +1100
Subject: [PATCH 49/63] Remove dynamic objects from security index (#40499)

The security index had a few "object" types with "dynamic": true.
However, this automatically creates a mapping for each field that is
created within those objects. This means that types are dynamically
inferred and "locked in" for future updates.
Instead we want "dynamic": false, which will allow us to store a range
of fields in these nested objects and retrieve them from the source,
without creating mapping types for those fields.
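To make the difference concrete, here is a minimal sketch (not part of the patch) that builds one of these object mappings with the `XContentBuilder` helper used elsewhere in this series. With `"dynamic": false`, fields written into the object are kept in `_source` and returned on fetch, but no per-field mappings are ever created for them:

```
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

// Illustrative sketch only: builds the kind of "metadata" object mapping this
// change switches to. Index creation and template plumbing are omitted.
public class DynamicFalseMappingSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("properties")
                    .startObject("metadata")
                        .field("type", "object")
                        .field("dynamic", false)
                    .endObject()
                .endObject()
            .endObject();
        System.out.println(Strings.toString(mapping));
    }
}
```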
--- .../core/src/main/resources/security-index-template.json | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/x-pack/plugin/core/src/main/resources/security-index-template.json b/x-pack/plugin/core/src/main/resources/security-index-template.json
index f4e3cd6db020d..8d567df5a514f 100644
--- a/x-pack/plugin/core/src/main/resources/security-index-template.json
+++ b/x-pack/plugin/core/src/main/resources/security-index-template.json
@@ -69,7 +69,7 @@
         },
         "metadata" : {
           "type" : "object",
-          "dynamic" : true
+          "dynamic" : false
         },
         "enabled": {
           "type": "boolean"
@@ -189,7 +189,7 @@
         },
         "metadata" : {
           "type" : "object",
-          "dynamic" : true
+          "dynamic" : false
         },
         "realm" : {
           "type" : "keyword"
         },
@@ -198,7 +198,7 @@
         },
         "rules" : {
           "type" : "object",
-          "dynamic" : true
+          "dynamic" : false
         },
         "refresh_token" : {
           "type" : "object",
@@ -253,7 +253,7 @@
         },
         "metadata" : {
           "type" : "object",
-          "dynamic" : true
+          "dynamic" : false
         },
         "authentication" : {
           "type" : "binary"

From 73e229c296e491ebd51315b6b26f14319743a89e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ricky=28=E9=81=93=E7=A6=BB=29?=
Date: Mon, 1 Apr 2019 14:00:28 +0800
Subject: [PATCH 50/63] If JAVA_HOME is not set, use the JDK that Gradle was
 run with instead (for master) (#40622)

This extends the existing IDE behavior to the CLI.
---
 .../org/elasticsearch/gradle/BuildPlugin.groovy | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 97074965a76f8..8a0e7a05327b4 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -361,16 +361,8 @@ class BuildPlugin implements Plugin {
             compilerJavaHome = findJavaHome(compilerJavaProperty)
         }
         if (compilerJavaHome == null) {
-            if (System.getProperty("idea.active") != null || System.getProperty("eclipse.launcher") != null) {
-                // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with
-                return Jvm.current().javaHome
-            } else {
-                throw new GradleException(
-                        "JAVA_HOME must be set to build Elasticsearch. " +
-                                "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
-                                "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details."
-                )
-            }
+            // if JAVA_HOME is not set, use the JDK that Gradle was run with
+            return Jvm.current().javaHome
         }
         return compilerJavaHome
     }

From 58515c302216f13346e142009da5e1e9b1337d71 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Mon, 1 Apr 2019 08:41:08 +0200
Subject: [PATCH 51/63] Fix merging of text field mapper (#40627)

On mapping updates the `text` field mapper does not update the field types
for the underlying prefix and phrase fields. In practice this shouldn't be
considered a bug, but we have an assert in the code that checks that the
field types in the mapper service are identical to the ones present in the
field mappers.
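As a sketch of the mapping shape that exercises this merge path (mirroring the new test below), a `text` field with `index_prefixes` and `index_phrases` creates the hidden prefix and phrase subfields whose field types are now refreshed on merge; submitting the same mapping twice is the no-op merge that previously tripped the assertion:

```
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentFactory;

// Sketch only: the mapping whose repeated submission through
// MapperService.merge(...) is exactly what the new testSimpleMerge() covers.
public class TextFieldMergeSketch {
    public static void main(String[] args) throws Exception {
        String mapping = Strings.toString(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("_doc")
                    .startObject("properties")
                        .startObject("a_field")
                            .field("type", "text")
                            .startObject("index_prefixes").endObject()
                            .field("index_phrases", true)
                        .endObject()
                    .endObject()
                .endObject()
            .endObject());
        System.out.println(mapping);
    }
}
```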
--- .../index/mapper/TextFieldMapper.java | 21 ++++- .../index/mapper/TextFieldMapperTests.java | 92 +++++++++++++++++++ 2 files changed, 110 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 5790248ead807..8a3203ad8e7e0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -813,18 +813,33 @@ protected String contentType() { return CONTENT_TYPE; } + @Override + public FieldMapper updateFieldType(Map fullNameToFieldType) { + TextFieldMapper mapper = (TextFieldMapper) super.updateFieldType(fullNameToFieldType); + if (mapper.prefixFieldMapper != null) { + mapper.prefixFieldMapper = (PrefixFieldMapper) mapper.prefixFieldMapper.updateFieldType(fullNameToFieldType); + } + if (mapper.phraseFieldMapper != null) { + mapper.phraseFieldMapper = (PhraseFieldMapper) mapper.phraseFieldMapper.updateFieldType(fullNameToFieldType); + } + return mapper; + } + @Override protected void doMerge(Mapper mergeWith) { super.doMerge(mergeWith); TextFieldMapper mw = (TextFieldMapper) mergeWith; + if (this.prefixFieldMapper != null && mw.prefixFieldMapper != null) { this.prefixFieldMapper = (PrefixFieldMapper) this.prefixFieldMapper.merge(mw.prefixFieldMapper); - } - else if (this.prefixFieldMapper != null || mw.prefixFieldMapper != null) { + } else if (this.prefixFieldMapper != null || mw.prefixFieldMapper != null) { throw new IllegalArgumentException("mapper [" + name() + "] has different index_prefix settings, current [" + this.prefixFieldMapper + "], merged [" + mw.prefixFieldMapper + "]"); } - else if (this.fieldType().indexPhrases != mw.fieldType().indexPhrases) { + + if (this.phraseFieldMapper != null && mw.phraseFieldMapper != null) { + this.phraseFieldMapper = (PhraseFieldMapper) this.phraseFieldMapper.merge(mw.phraseFieldMapper); + } else if (this.fieldType().indexPhrases != mw.fieldType().indexPhrases) { throw new IllegalArgumentException("mapper [" + name() + "] has different index_phrases settings, current [" + this.fieldType().indexPhrases + "], merged [" + mw.fieldType().indexPhrases + "]"); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 7314ecb1de7c2..449d17a5b9bda 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -78,6 +78,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.Is.is; public class TextFieldMapperTests extends ESSingleNodeTestCase { @@ -1084,4 +1085,95 @@ public void testFastPhrasePrefixes() throws IOException { assertThat(q, equalTo(mpq)); } } + + public void testSimpleMerge() throws IOException { + MapperService mapperService = createIndex("test_mapping_merge").mapperService(); + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", true) + .endObject() + .endObject() + .endObject().endObject()); + DocumentMapper mapper = mapperService.merge("_doc", 
+ new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapper.mappers().getMapper("a_field"), instanceOf(TextFieldMapper.class)); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", true) + .endObject() + .endObject() + .endObject().endObject()); + DocumentMapper mapper = mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapper.mappers().getMapper("a_field"), instanceOf(TextFieldMapper.class)); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes") + .field("min_chars", "3") + .endObject() + .field("index_phrases", true) + .endObject() + .endObject() + .endObject().endObject()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)); + assertThat(e.getMessage(), containsString("different [index_prefixes]")); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", false) + .endObject() + .endObject() + .endObject().endObject()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)); + assertThat(e.getMessage(), containsString("different [index_phrases]")); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("a_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .field("index_phrases", true) + .endObject() + .startObject("b_field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject().endObject()); + DocumentMapper mapper = mapperService.merge("_doc", + new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapper.mappers().getMapper("a_field"), instanceOf(TextFieldMapper.class)); + assertThat(mapper.mappers().getMapper("b_field"), instanceOf(KeywordFieldMapper.class)); + } + } } From 354fa31a69a6ea04a74fc49d89a24c3bf8904458 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 1 Apr 2019 10:26:12 +0200 Subject: [PATCH 52/63] Re-enable bwc tests now that #40319 is merged (#40646) --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index f6c3222a4074a..64d51f0938026 100644 --- a/build.gradle +++ b/build.gradle @@ -162,8 +162,8 @@ task verifyVersions { * after the backport of the backcompat code is complete. 
*/

-boolean bwc_tests_enabled = false
-final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/40319" /* place a PR link here when committing bwc changes */
+boolean bwc_tests_enabled = true
+final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
 if (bwc_tests_enabled == false) {
   if (bwc_tests_disabled_issue.isEmpty()) {
     throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")

From 1fac56916bb3c4f3333c639e59188dbe743e385b Mon Sep 17 00:00:00 2001
From: Andrey Ershov
Date: Mon, 1 Apr 2019 12:49:05 +0300
Subject: [PATCH 53/63] Do not perform cleanup if Manifest write fails with
 dirty exception (#40519)

Currently, if the Manifest write is unsuccessful (i.e. a WriteStateException is
thrown) we perform cleanup of the newly created metadata files. However, this
is wrong. Consider the following sequence (caught by CI here
https://github.com/elastic/elasticsearch/issues/39077):

- cluster global data is written **successfully**
- the associated manifest write **fails** (during the fsync, i.e. the files have been written)
- deleting (reverting) the manifest files **fails**, and the metadata is therefore persisted
- deleting (reverting) the cluster global data is **successful**

In this case, when trying to load metadata (after a node restart because of the
dirty WriteStateException), the following exception will happen

```
java.io.IOException: failed to find global metadata [generation: 0]
```

because the manifest file is referencing a missing global metadata file.

This commit checks whether the thrown WriteStateException is dirty, and if it
is, we don't perform any cleanup, because a new Manifest file might have been
created but its deletion failed. In the future, we might add a more
fine-grained check: perform the cleanup if the WriteStateException is dirty
but the Manifest deletion is successful.

Closes https://github.com/elastic/elasticsearch/issues/39077
---
 .../java/org/elasticsearch/gateway/GatewayMetaState.java | 9 ++++++++-
 .../org/elasticsearch/gateway/GatewayMetaStateTests.java | 1 -
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java
index bd6fd908d49ab..30361fa70ee6b 100644
--- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java
+++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java
@@ -320,7 +320,14 @@ long writeManifestAndCleanup(String reason, Manifest manifest) throws WriteState
             finished = true;
             return generation;
         } catch (WriteStateException e) {
-            rollback();
+            // if the Manifest write results in a dirty WriteStateException it's not safe to remove
+            // the new metadata files, because if the Manifest was actually written to disk and its deletion
+            // fails, it will reference these new metadata files.
+            // In the future, we might decide to add a more fine-grained check to understand if after a
+            // WriteStateException the Manifest deletion has actually failed.
+ if (e.isDirty() == false) { + rollback(); + } throw e; } } diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 1f4e0bafe4a3b..22259b919ec6f 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -374,7 +374,6 @@ private static MetaData randomMetaDataForTx() { return builder.build(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/39077") public void testAtomicityWithFailures() throws IOException { try (NodeEnvironment env = newNodeEnvironment()) { MetaStateServiceWithFailures metaStateService = From 6f12febe44a61f6892d37e6c3f63e0b57ce5e7d4 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Mon, 1 Apr 2019 06:25:06 -0400 Subject: [PATCH 54/63] Correct bug in ScriptDocValues (#40488) If a field `field_name` was missing in a document, doc['field_name'].get(0) incorrectly retrieved a value of the previously accessed document. This happened because `get(int index)` function was just accessing `values[index]` without checking the number of values - `count`. This PR fixes this. --- .../index/fielddata/ScriptDocValues.java | 64 +++++++++---------- .../ScriptDocValuesGeoPointsTests.java | 60 ++++++++++++----- .../fielddata/ScriptDocValuesLongsTests.java | 4 ++ 3 files changed, 81 insertions(+), 47 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index 6aad80c4421e4..afd1d9e368480 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -112,15 +112,15 @@ protected void resize(int newSize) { } public long getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } @Override public Long get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return values[index]; } @@ -151,15 +151,15 @@ public Dates(SortedNumericDocValues in, boolean isNanos) { * in. */ public JodaCompatibleZonedDateTime getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } return get(0); } @Override public JodaCompatibleZonedDateTime get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } if (index >= count) { throw new IndexOutOfBoundsException( "attempted to fetch the [" + index + "] date when there are only [" @@ -240,15 +240,15 @@ public SortedNumericDoubleValues getInternalValues() { } public double getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! 
" + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } @Override public Double get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return values[index]; } @@ -297,11 +297,7 @@ protected void resize(int newSize) { } public GeoPoint getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } public double getLat() { @@ -330,6 +326,10 @@ public double getLon() { @Override public GeoPoint get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } final GeoPoint point = values[index]; return new GeoPoint(point.lat(), point.lon()); } @@ -409,15 +409,15 @@ protected void resize(int newSize) { } public boolean getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return values[0]; + return get(0); } @Override public Boolean get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return values[index]; } @@ -492,14 +492,14 @@ public Strings(SortedBinaryDocValues in) { @Override public String get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return values[index].get().utf8ToString(); } public String getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } return get(0); } } @@ -512,6 +512,10 @@ public BytesRefs(SortedBinaryDocValues in) { @Override public BytesRef get(int index) { + if (count == 0) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } /** * We need to make a copy here because {@link BinaryScriptDocValues} might reuse the * returned value and the same instance might be used to @@ -521,10 +525,6 @@ public BytesRef get(int index) { } public BytesRef getValue() { - if (count == 0) { - throw new IllegalStateException("A document doesn't have a value for a field! 
" + - "Use doc[].size()==0 to check if a document is missing a field!"); - } return get(0); } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java index 72d890edc795d..085b43a686bf5 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.fielddata; +import org.elasticsearch.index.fielddata.ScriptDocValues.GeoPoints; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.test.ESTestCase; @@ -28,31 +29,30 @@ public class ScriptDocValuesGeoPointsTests extends ESTestCase { - private static MultiGeoPointValues wrap(final GeoPoint... points) { + private static MultiGeoPointValues wrap(GeoPoint[][] points) { return new MultiGeoPointValues() { - int docID = -1; + GeoPoint[] current; int i; @Override public GeoPoint nextValue() { - if (docID != 0) { - fail(); - } - return points[i++]; + return current[i++]; } @Override public boolean advanceExact(int docId) { - docID = docId; - return points.length > 0; + if (docId < points.length) { + current = points[docId]; + } else { + current = new GeoPoint[0]; + } + i = 0; + return current.length > 0; } @Override public int docValueCount() { - if (docID != 0) { - return 0; - } - return points.length; + return current.length; } }; } @@ -71,7 +71,8 @@ public void testGeoGetLatLon() throws IOException { final double lon1 = randomLon(); final double lon2 = randomLon(); - final MultiGeoPointValues values = wrap(new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2)); + GeoPoint[][] points = {{new GeoPoint(lat1, lon1), new GeoPoint(lat2, lon2)}}; + final MultiGeoPointValues values = wrap(points); final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values); script.setNextDocId(1); @@ -88,11 +89,13 @@ public void testGeoGetLatLon() throws IOException { public void testGeoDistance() throws IOException { final double lat = randomLat(); final double lon = randomLon(); - final MultiGeoPointValues values = wrap(new GeoPoint(lat, lon)); + GeoPoint[][] points = {{new GeoPoint(lat, lon)}}; + final MultiGeoPointValues values = wrap(points); final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values); script.setNextDocId(0); - final ScriptDocValues.GeoPoints emptyScript = new ScriptDocValues.GeoPoints(wrap()); + GeoPoint[][] points2 = {new GeoPoint[0]}; + final ScriptDocValues.GeoPoints emptyScript = new ScriptDocValues.GeoPoints(wrap(points2)); emptyScript.setNextDocId(0); final double otherLat = randomLat(); @@ -110,4 +113,31 @@ public void testGeoDistance() throws IOException { script.planeDistanceWithDefault(otherLat, otherLon, 42) / 1000d, 0.01); assertEquals(42, emptyScript.planeDistanceWithDefault(otherLat, otherLon, 42), 0); } + + public void testMissingValues() throws IOException { + GeoPoint[][] points = new GeoPoint[between(3, 10)][]; + for (int d = 0; d < points.length; d++) { + points[d] = new GeoPoint[randomBoolean() ? 
0 : between(1, 10)]; + } + final ScriptDocValues.GeoPoints geoPoints = new GeoPoints(wrap(points)); + for (int d = 0; d < points.length; d++) { + geoPoints.setNextDocId(d); + if (points[d].length > 0) { + assertEquals(points[d][0], geoPoints.getValue()); + } else { + Exception e = expectThrows(IllegalStateException.class, () -> geoPoints.getValue()); + assertEquals("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); + e = expectThrows(IllegalStateException.class, () -> geoPoints.get(0)); + assertEquals("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); + } + assertEquals(points[d].length, geoPoints.size()); + for (int i = 0; i < points[d].length; i++) { + assertEquals(points[d][i], geoPoints.get(i)); + } + } + } + + } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java index a5674e4da7d7d..c74725d3774b7 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java @@ -42,10 +42,14 @@ public void testLongs() throws IOException { longs.setNextDocId(d); if (values[d].length > 0) { assertEquals(values[d][0], longs.getValue()); + assertEquals(values[d][0], (long) longs.get(0)); } else { Exception e = expectThrows(IllegalStateException.class, () -> longs.getValue()); assertEquals("A document doesn't have a value for a field! " + "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); + e = expectThrows(IllegalStateException.class, () -> longs.get(0)); + assertEquals("A document doesn't have a value for a field! 
" + + "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); } assertEquals(values[d].length, longs.size()); for (int i = 0; i < values[d].length; i++) { From 587a84dc5adf5aca27e2b50086044250ddc2e657 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 1 Apr 2019 07:05:17 -0500 Subject: [PATCH 55/63] [ML] fix test, should account for async nature of audit (#40637) --- .../integration/DataFrameAuditorIT.java | 34 ++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java index 2367e255cd9ba..9884c9bb6793b 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameAuditorIT.java @@ -17,7 +17,9 @@ import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; public class DataFrameAuditorIT extends DataFrameRestTestCase { @@ -49,7 +51,6 @@ public void createIndexes() throws IOException { setupUser(TEST_USER_NAME, Arrays.asList("data_frame_transforms_admin", DATA_ACCESS_ROLE)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40594") @SuppressWarnings("unchecked") public void testAuditorWritesAudits() throws Exception { String transformId = "simplePivotForAudit"; @@ -62,17 +63,26 @@ public void testAuditorWritesAudits() throws Exception { startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); // Make sure we wrote to the audit - assertTrue(indexExists(DataFrameInternalIndex.AUDIT_INDEX)); - refreshIndex(DataFrameInternalIndex.AUDIT_INDEX); - Request request = new Request("GET", DataFrameInternalIndex.AUDIT_INDEX + "/_search"); + final Request request = new Request("GET", DataFrameInternalIndex.AUDIT_INDEX + "/_search"); request.setJsonEntity("{\"query\":{\"term\":{\"transform_id\":\"simplePivotForAudit\"}}}"); - Map response = entityAsMap(client().performRequest(request)); - Map hitRsp = (Map) ((List) ((Map)response.get("hits")).get("hits")).get(0); - Map source = (Map)hitRsp.get("_source"); - assertThat(source.get("transform_id"), equalTo(transformId)); - assertThat(source.get("level"), equalTo("info")); - assertThat(source.get("message"), is(notNullValue())); - assertThat(source.get("node_name"), is(notNullValue())); - assertThat(source.get("timestamp"), is(notNullValue())); + assertBusy(() -> { + assertTrue(indexExists(DataFrameInternalIndex.AUDIT_INDEX)); + }); + // Since calls to write the Auditor are sent and forgot (async) we could have returned from the start, + // finished the job (as this is a very short DF job), all without the audit being fully written. 
+ assertBusy(() -> { + refreshIndex(DataFrameInternalIndex.AUDIT_INDEX); + Map response = entityAsMap(client().performRequest(request)); + List hitList = ((List) ((Map)response.get("hits")).get("hits")); + assertThat(hitList, is(not(empty()))); + Map hitRsp = (Map) hitList.get(0); + Map source = (Map)hitRsp.get("_source"); + assertThat(source.get("transform_id"), equalTo(transformId)); + assertThat(source.get("level"), equalTo("info")); + assertThat(source.get("message"), is(notNullValue())); + assertThat(source.get("node_name"), is(notNullValue())); + assertThat(source.get("timestamp"), is(notNullValue())); + }); + } } From b6ca8b73a95e0359d90794d00203c4ca3b7fc358 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 1 Apr 2019 07:08:06 -0500 Subject: [PATCH 56/63] [ML] Addressing bug streaming DatafeedConfig aggs from (<= 6.5.4) -> 6.7.0 (#40610) (#40660) * Addressing stream failure and adding tests to catch such in the future * Add aggs to full cluster restart tests * Test BWC for datafeeds with and without aggs The wire serialisation is different for null/non-null aggs, so it's worth testing both cases. --- .../xpack/core/ml/datafeed/AggProvider.java | 6 +- .../core/ml/datafeed/DatafeedConfig.java | 2 + .../MlMigrationFullClusterRestartIT.java | 12 ++ x-pack/qa/rolling-upgrade/build.gradle | 2 + .../mixed_cluster/40_ml_datafeed_crud.yml | 113 ++++++++++++++++-- .../test/old_cluster/40_ml_datafeed_crud.yml | 90 ++++++++++++-- .../upgraded_cluster/40_ml_datafeed_crud.yml | 52 ++++++-- 7 files changed, 250 insertions(+), 27 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java index 7982cffb01de5..8585e4122e673 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java @@ -73,7 +73,8 @@ static AggProvider fromStream(StreamInput in) throws IOException { } else if (in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects return new AggProvider(in.readMap(), null, null); } else { // only supports eagerly parsed objects - return AggProvider.fromParsedAggs(in.readOptionalWriteable(AggregatorFactories.Builder::new)); + // Upstream, we have read the bool already and know for sure that we have parsed aggs in the stream + return AggProvider.fromParsedAggs(new AggregatorFactories.Builder(in)); } } @@ -111,7 +112,8 @@ public void writeTo(StreamOutput out) throws IOException { // actually are aggregations defined throw new ElasticsearchException("Unsupported operation: parsed aggregations are null"); } - out.writeOptionalWriteable(parsedAggs); + // Upstream we already verified that this calling object is not null, no need to write a second boolean to the stream + parsedAggs.writeTo(out); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 3cd071f61aaee..810d97df34636 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -212,6 +212,7 @@ public DatafeedConfig(StreamInput in) throws IOException { } // each of these writables are version aware this.queryProvider = 
QueryProvider.fromStream(in); + // This reads a boolean from the stream, if true, it sends the stream to the `fromStream` method this.aggProvider = in.readOptionalWriteable(AggProvider::fromStream); if (in.readBoolean()) { @@ -420,6 +421,7 @@ public void writeTo(StreamOutput out) throws IOException { // Each of these writables are version aware queryProvider.writeTo(out); // never null + // This writes a boolean to the stream, if true, it sends the stream to the `writeTo` method out.writeOptionalWriteable(aggProvider); if (scriptFields != null) { diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 2161ea1fd2aa0..917d73d5af2c4 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -13,6 +13,10 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -112,6 +116,7 @@ private void oldClusterTests() throws IOException { DatafeedConfig.Builder dfBuilder = new DatafeedConfig.Builder(OLD_CLUSTER_STARTED_DATAFEED_ID, OLD_CLUSTER_OPEN_JOB_ID); dfBuilder.setIndices(Collections.singletonList("airline-data")); + addAggregations(dfBuilder); Request putDatafeed = new Request("PUT", "_xpack/ml/datafeeds/" + OLD_CLUSTER_STARTED_DATAFEED_ID); putDatafeed.setJsonEntity(Strings.toString(dfBuilder.build())); @@ -245,4 +250,11 @@ private void assertJobNotPresent(String jobId, List> jobs) { .filter(id -> id.equals(jobId)).findFirst(); assertFalse(config.isPresent()); } + + private void addAggregations(DatafeedConfig.Builder dfBuilder) { + TermsAggregationBuilder airline = AggregationBuilders.terms("airline"); + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time").subAggregation(airline); + dfBuilder.setParsedAggregations(AggregatorFactories.builder().addAggregator( + AggregationBuilders.histogram("time").interval(300000).subAggregation(maxTime).field("time"))); + } } diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index f689573a61437..0cdbbe71e55ba 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -232,6 +232,8 @@ for (Version version : bwcVersions.wireCompatible) { 'mixed_cluster/10_basic/Start scroll in mixed cluster on upgraded node that we will continue after upgrade', 'mixed_cluster/30_ml_jobs_crud/Create a job in the mixed cluster and write some data', 'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed in mixed cluster', + 'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed without aggs in mixed cluster', + 'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed with aggs in mixed cluster' ].join(',') 
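The AggProvider change earlier in this patch is a textbook case of version-gated wire serialization: 6.6+ nodes exchange the aggregations as a lazy map, while older nodes expect an eagerly parsed object with no preceding null marker. A rough sketch of the general pattern, assuming Elasticsearch's `StreamOutput`, `Writeable` and `Version` types; the `payloadMap`/`parsedPayload` names are illustrative only:

----
import java.io.IOException;
import java.util.Map;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

final class VersionGatedWire {
    // Pick the wire representation based on the version of the node on the
    // other end of the stream, so mixed-version clusters keep understanding each other.
    static void write(StreamOutput out, Map<String, Object> payloadMap, Writeable parsedPayload) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
            out.writeMap(payloadMap);      // 6.6+ accepts the lazy map form
        } else {
            parsedPayload.writeTo(out);    // older nodes read the parsed object directly, no null marker
        }
    }
}
----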
finalizedBy "${baseName}#oldClusterTestCluster#node1.stop" } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index 6812245e5a24e..b37d6de4947c7 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -1,28 +1,94 @@ --- -"Test old cluster datafeed": +"Test old cluster datafeed without aggs": - do: ml.get_datafeeds: - datafeed_id: old-cluster-datafeed - - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"} + datafeed_id: old-cluster-datafeed-without-aggs + - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-without-aggs"} - length: { datafeeds.0.indices: 1 } - gte: { datafeeds.0.scroll_size: 2000 } + - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless } + - is_false: datafeeds.0.aggregations - do: ml.get_datafeed_stats: - datafeed_id: old-cluster-datafeed + datafeed_id: old-cluster-datafeed-without-aggs - match: { datafeeds.0.state: "stopped"} - is_false: datafeeds.0.node --- -"Put job and datafeed in mixed cluster": +"Test old cluster datafeed with aggs": + - do: + ml.get_datafeeds: + datafeed_id: old-cluster-datafeed-with-aggs + - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-with-aggs"} + - length: { datafeeds.0.indices: 1 } + - gte: { datafeeds.0.scroll_size: 2000 } + - is_false: datafeeds.0.script_fields + - match: { datafeeds.0.aggregations.buckets.date_histogram.field: time } + - match: { datafeeds.0.aggregations.buckets.aggregations.time.max.field: time } + + - do: + ml.get_datafeed_stats: + datafeed_id: old-cluster-datafeed-with-aggs + - match: { datafeeds.0.state: "stopped"} + - is_false: datafeeds.0.node + +--- +"Put job and datafeed without aggs in mixed cluster": + - do: + ml.put_job: + job_id: mixed-cluster-datafeed-job-without-aggs + body: > + { + "description":"Cluster upgrade", + "analysis_config" : { + "bucket_span": "60s", + "detectors" :[{"function":"count"}] + }, + "analysis_limits" : { + "model_memory_limit": "50mb" + }, + "data_description" : { + "format":"xcontent", + "time_field":"time" + } + } + - do: + ml.put_datafeed: + datafeed_id: mixed-cluster-datafeed-without-aggs + body: > + { + "job_id":"mixed-cluster-datafeed-job-without-aggs", + "indices":["airline-data"], + "scroll_size": 2000, + "script_fields": { + "double_responsetime": { + "script": { + "lang": "painless", + "source": "doc['responsetime'].value * 2" + } + } + } + } + + - do: + ml.get_datafeed_stats: + datafeed_id: mixed-cluster-datafeed-without-aggs + - match: { datafeeds.0.state: stopped} + - is_false: datafeeds.0.node + +--- +"Put job and datafeed with aggs in mixed cluster": + - do: ml.put_job: - job_id: mixed-cluster-datafeed-job + job_id: mixed-cluster-datafeed-job-with-aggs body: > { "description":"Cluster upgrade", "analysis_config" : { "bucket_span": "60s", + "summary_count_field_name": "doc_count", "detectors" :[{"function":"count"}] }, "analysis_limits" : { @@ -36,16 +102,43 @@ - do: ml.put_datafeed: - datafeed_id: mixed-cluster-datafeed + datafeed_id: mixed-cluster-datafeed-with-aggs body: > { - "job_id":"mixed-cluster-datafeed-job", + "job_id":"mixed-cluster-datafeed-job-with-aggs", "indices":["airline-data"], - "scroll_size": 2000 + "scroll_size": 2000, + "aggregations": { + "buckets": { + 
"date_histogram": { + "field": "time", + "interval": "30s", + "time_zone": "UTC" + }, + "aggregations": { + "time": { + "max": {"field": "time"} + }, + "airline": { + "terms": { + "field": "airline", + "size": 100 + }, + "aggregations": { + "responsetime": { + "avg": { + "field": "responsetime" + } + } + } + } + } + } + } } - do: ml.get_datafeed_stats: - datafeed_id: mixed-cluster-datafeed + datafeed_id: mixed-cluster-datafeed-with-aggs - match: { datafeeds.0.state: stopped} - is_false: datafeeds.0.node diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml index 269ecf3c677e6..597540d36c4ec 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml @@ -1,8 +1,9 @@ --- -"Put job and datafeed in old cluster": +"Put job and datafeed without aggs in old cluster": + - do: ml.put_job: - job_id: old-cluster-datafeed-job + job_id: old-cluster-datafeed-job-without-aggs body: > { "description":"Cluster upgrade", @@ -18,20 +19,95 @@ "time_field":"time" } } - - match: { job_id: old-cluster-datafeed-job } + - match: { job_id: old-cluster-datafeed-job-without-aggs } - do: ml.put_datafeed: - datafeed_id: old-cluster-datafeed + datafeed_id: old-cluster-datafeed-without-aggs body: > { - "job_id":"old-cluster-datafeed-job", + "job_id":"old-cluster-datafeed-job-without-aggs", "indices":["airline-data"], - "scroll_size": 2000 + "scroll_size": 2000, + "script_fields": { + "double_responsetime": { + "script": { + "lang": "painless", + "source": "doc['responsetime'].value * 2" + } + } + } + } + + - do: + ml.get_datafeed_stats: + datafeed_id: old-cluster-datafeed-without-aggs + - match: { datafeeds.0.state: stopped} + - is_false: datafeeds.0.node + +--- +"Put job and datafeed with aggs in old cluster": + + - do: + ml.put_job: + job_id: old-cluster-datafeed-job-with-aggs + body: > + { + "description":"Cluster upgrade", + "analysis_config" : { + "bucket_span": "60s", + "summary_count_field_name": "doc_count", + "detectors" :[{"function":"count"}] + }, + "analysis_limits" : { + "model_memory_limit": "50mb" + }, + "data_description" : { + "format":"xcontent", + "time_field":"time" + } + } + - match: { job_id: old-cluster-datafeed-job-with-aggs } + + - do: + ml.put_datafeed: + datafeed_id: old-cluster-datafeed-with-aggs + body: > + { + "job_id":"old-cluster-datafeed-job-with-aggs", + "indices":["airline-data"], + "scroll_size": 2000, + "aggregations": { + "buckets": { + "date_histogram": { + "field": "time", + "interval": "30s", + "time_zone": "UTC" + }, + "aggregations": { + "time": { + "max": {"field": "time"} + }, + "airline": { + "terms": { + "field": "airline", + "size": 100 + }, + "aggregations": { + "responsetime": { + "avg": { + "field": "responsetime" + } + } + } + } + } + } + } } - do: ml.get_datafeed_stats: - datafeed_id: old-cluster-datafeed + datafeed_id: old-cluster-datafeed-with-aggs - match: { datafeeds.0.state: stopped} - is_false: datafeeds.0.node diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml index 928fb3a066c28..6ca05d86b1c4d 100644 --- 
a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml @@ -5,7 +5,6 @@ setup: wait_for_nodes: 3 # wait for long enough that we give delayed unassigned shards to stop being delayed timeout: 70s - - do: indices.create: index: airline-data @@ -14,32 +13,69 @@ setup: properties: time: type: date +--- +"Test old and mixed cluster datafeeds without aggs": + - do: + ml.get_datafeeds: + datafeed_id: old-cluster-datafeed-without-aggs + - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-without-aggs"} + - length: { datafeeds.0.indices: 1 } + - gte: { datafeeds.0.scroll_size: 2000 } + - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless } + - is_false: datafeeds.0.aggregations + + - do: + ml.get_datafeed_stats: + datafeed_id: old-cluster-datafeed-without-aggs + - match: { datafeeds.0.state: "stopped"} + - is_false: datafeeds.0.node + + - do: + ml.get_datafeeds: + datafeed_id: mixed-cluster-datafeed-without-aggs + - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed-without-aggs"} + - length: { datafeeds.0.indices: 1 } + - gte: { datafeeds.0.scroll_size: 2000 } + - match: { datafeeds.0.script_fields.double_responsetime.script.lang: painless } + - is_false: datafeeds.0.aggregations + + - do: + ml.get_datafeed_stats: + datafeed_id: mixed-cluster-datafeed-without-aggs + - match: { datafeeds.0.state: "stopped"} + - is_false: datafeeds.0.node --- -"Test old and mixed cluster datafeeds": +"Test old and mixed cluster datafeeds with aggs": - do: ml.get_datafeeds: - datafeed_id: old-cluster-datafeed - - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"} + datafeed_id: old-cluster-datafeed-with-aggs + - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed-with-aggs"} - length: { datafeeds.0.indices: 1 } - gte: { datafeeds.0.scroll_size: 2000 } + - is_false: datafeeds.0.script_fields + - match: { datafeeds.0.aggregations.buckets.date_histogram.field: time } + - match: { datafeeds.0.aggregations.buckets.aggregations.time.max.field: time } - do: ml.get_datafeed_stats: - datafeed_id: old-cluster-datafeed + datafeed_id: old-cluster-datafeed-with-aggs - match: { datafeeds.0.state: "stopped"} - is_false: datafeeds.0.node - do: ml.get_datafeeds: - datafeed_id: mixed-cluster-datafeed - - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed"} + datafeed_id: mixed-cluster-datafeed-with-aggs + - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed-with-aggs"} - length: { datafeeds.0.indices: 1 } - gte: { datafeeds.0.scroll_size: 2000 } + - is_false: datafeeds.0.script_fields + - match: { datafeeds.0.aggregations.buckets.date_histogram.field: time } + - match: { datafeeds.0.aggregations.buckets.aggregations.time.max.field: time } - do: ml.get_datafeed_stats: - datafeed_id: mixed-cluster-datafeed + datafeed_id: mixed-cluster-datafeed-with-aggs - match: { datafeeds.0.state: "stopped"} - is_false: datafeeds.0.node From 238428d22f84653f130008f26e12d998a4732983 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Mon, 1 Apr 2019 08:26:51 -0400 Subject: [PATCH 57/63] [DOCS] Add 'time value' links to several monitor settings (#40633) --- .../settings/monitoring-settings.asciidoc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc index bad70a4359afe..c460ac51812b2 100644 --- 
a/docs/reference/settings/monitoring-settings.asciidoc +++ b/docs/reference/settings/monitoring-settings.asciidoc @@ -70,11 +70,11 @@ to pass through this cluster. `xpack.monitoring.collection.cluster.stats.timeout`:: -Sets the timeout for collecting the cluster statistics. Defaults to `10s`. +(<<time-units,time value>>) Timeout for collecting the cluster statistics. Defaults to `10s`. `xpack.monitoring.collection.node.stats.timeout`:: -Sets the timeout for collecting the node statistics. Defaults to `10s`. +(<<time-units,time value>>) Timeout for collecting the node statistics. Defaults to `10s`. `xpack.monitoring.collection.indices` (<>):: @@ -87,7 +87,7 @@ ensure monitoring of system indices. For example `.*,test*,-test3` `xpack.monitoring.collection.index.stats.timeout`:: -Sets the timeout for collecting index statistics. Defaults to `10s`. +(<<time-units,time value>>) Timeout for collecting index statistics. Defaults to `10s`. `xpack.monitoring.collection.index.recovery.active_only`:: @@ -96,11 +96,11 @@ collect only active recoveries. Defaults to `false`. `xpack.monitoring.collection.index.recovery.timeout`:: -Sets the timeout for collecting the recovery information. Defaults to `10s`. +(<<time-units,time value>>) Timeout for collecting the recovery information. Defaults to `10s`. `xpack.monitoring.history.duration`:: -Sets the retention duration beyond which the indices created by a Monitoring +(<<time-units,time value>>) Retention duration beyond which the indices created by a Monitoring exporter are automatically deleted. Defaults to `7d` (7 days). + -- @@ -206,12 +206,12 @@ The password for the `auth.username`. `connection.timeout`:: -The amount of time that the HTTP connection is supposed to wait for a socket to open for the +(<<time-units,time value>>) Amount of time that the HTTP connection is supposed to wait for a socket to open for the request. The default value is `6s`. `connection.read_timeout`:: -The amount of time that the HTTP connection is supposed to wait for a socket to +(<<time-units,time value>>) Amount of time that the HTTP connection is supposed to wait for a socket to send back a response. The default value is `10 * connection.timeout` (`60s` if neither are set). 
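All of the settings tagged as time values accept the same duration syntax (`10s`, `60s`, `7d`, and so on). A quick way to check how such a string is interpreted, assuming Elasticsearch's `TimeValue` utility class:

----
import org.elasticsearch.common.unit.TimeValue;

public class TimeValueDemo {
    public static void main(String[] args) {
        // Parse the same duration strings the monitoring settings accept;
        // the second argument is only used in error messages.
        TimeValue timeout = TimeValue.parseTimeValue("10s", "xpack.monitoring.collection.cluster.stats.timeout");
        TimeValue retention = TimeValue.parseTimeValue("7d", "xpack.monitoring.history.duration");
        System.out.println(timeout.millis());   // 10000
        System.out.println(retention.days());   // 7
    }
}
----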
`ssl`:: From 5fb53af1677c0857bb7a9c562b9872e1d2ac95b6 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 1 Apr 2019 07:49:48 -0500 Subject: [PATCH 58/63] muted test (#40686) --- .../index/fielddata/ScriptDocValuesGeoPointsTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java index 085b43a686bf5..9d118f3fc5c82 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java @@ -114,6 +114,7 @@ public void testGeoDistance() throws IOException { assertEquals(42, emptyScript.planeDistanceWithDefault(otherLat, otherLon, 42), 0); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/40684") public void testMissingValues() throws IOException { GeoPoint[][] points = new GeoPoint[between(3, 10)][]; for (int d = 0; d < points.length; d++) { From dbbad817a9a800c0485183c3b8072c49301de1d9 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 1 Apr 2019 14:20:01 +0100 Subject: [PATCH 59/63] [TEST] Mute WebhookHttpsIntegrationTests.testHttps Due to https://github.com/elastic/elasticsearch/issues/35503 --- .../watcher/actions/webhook/WebhookHttpsIntegrationTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index 4e62eedd221d2..adbf43140328b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -67,6 +67,7 @@ public void stopWebservice() throws Exception { webServer.close(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/35503") public void testHttps() throws Exception { webServer.enqueue(new MockResponse().setResponseCode(200).setBody("body")); HttpRequestTemplate.Builder builder = HttpRequestTemplate.builder("localhost", webServer.getPort()) From b4912f097ea127870ddb1458ddebd0c6e618b00b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 1 Apr 2019 16:20:20 +0200 Subject: [PATCH 60/63] Remove -Xlint exclusions in the ingest-geoip module. 
(#40629) Relates to #40366 --- modules/ingest-geoip/build.gradle | 3 --- .../org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java | 4 ++-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index f2dec9cd7b7fa..7b0f4a83e91b5 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -41,9 +41,6 @@ task copyDefaultGeoIp2DatabaseFiles(type: Copy) { project.bundlePlugin.dependsOn(copyDefaultGeoIp2DatabaseFiles) -compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked,-serial" -compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" - bundlePlugin { from("${project.buildDir}/ingest-geoip") { into '/' diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 8769a643e1d3c..a170b4417739b 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -171,14 +171,14 @@ public void close() throws IOException { * reduction of CPU usage. */ static class GeoIpCache { - private final Cache cache; + private final Cache, AbstractResponse> cache; //package private for testing GeoIpCache(long maxSize) { if (maxSize < 0) { throw new IllegalArgumentException("geoip max cache size must be 0 or greater"); } - this.cache = CacheBuilder.builder().setMaximumWeight(maxSize).build(); + this.cache = CacheBuilder., AbstractResponse>builder().setMaximumWeight(maxSize).build(); } T putIfAbsent(InetAddress ip, Class responseType, From cafde31129d85d20f9e445be131c9e85163bed9d Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 1 Apr 2019 17:34:54 +0300 Subject: [PATCH 61/63] Add JDBC trustore type to the docs (#40592) --- docs/reference/sql/endpoints/jdbc.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc index 37f3d59ef6410..7b1169d34d323 100644 --- a/docs/reference/sql/endpoints/jdbc.asciidoc +++ b/docs/reference/sql/endpoints/jdbc.asciidoc @@ -113,6 +113,8 @@ Query timeout (in seconds). That is the maximum amount of time waiting for a que `ssl.truststore.pass`:: trust store password +`ssl.truststore.type` (default `JKS`):: trust store type. 
`PKCS12` is a common, alternative format + `ssl.protocol`(default `TLS`):: SSL protocol to be used [float] From 9536c5f7b7fecda4592f6b169adef180eadbba54 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 1 Apr 2019 18:01:03 +0300 Subject: [PATCH 62/63] SQL: Documentation for LIKE and RLIKE operators (#40623) --- docs/reference/sql/functions/index.asciidoc | 40 +++++-- .../sql/functions/like-rlike.asciidoc | 102 ++++++++++++++++++ docs/reference/sql/functions/math.asciidoc | 2 +- .../sql/functions/operators.asciidoc | 51 ++++++--- .../qa/src/main/resources/docs/docs.csv-spec | 22 ++++ 5 files changed, 190 insertions(+), 27 deletions(-) create mode 100644 docs/reference/sql/functions/like-rlike.asciidoc diff --git a/docs/reference/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc index 6e966403ce0e9..931c28fd3f6dd 100644 --- a/docs/reference/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -6,7 +6,27 @@ {es-sql} provides a comprehensive set of built-in operators and functions: * <> -* <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +* <> +** <> +** <> +* <> ** <> ** <> ** <> @@ -24,9 +44,10 @@ ** <> ** <> ** <> -* <> +* <> ** <> -* <> +* <> +* <> ** <> ** <> ** <> @@ -47,11 +68,11 @@ ** <> ** <> ** <> -* <> +* <> ** <> ** <> ** <> -* <> +* <> ** <> ** <> ** <> @@ -80,7 +101,7 @@ ** <> ** <> ** <> -* <> +* <> ** <> ** <> ** <> @@ -101,10 +122,10 @@ ** <> ** <> ** <> -* <> +* <> ** <> ** <> -* <> +* <> ** <> ** <> ** <> @@ -112,11 +133,12 @@ ** <> ** <> ** <> -* <> +* <> ** <> ** <> include::operators.asciidoc[] +include::like-rlike.asciidoc[] include::aggs.asciidoc[] include::grouping.asciidoc[] include::date-time.asciidoc[] diff --git a/docs/reference/sql/functions/like-rlike.asciidoc b/docs/reference/sql/functions/like-rlike.asciidoc new file mode 100644 index 0000000000000..c38f62ae7d7c1 --- /dev/null +++ b/docs/reference/sql/functions/like-rlike.asciidoc @@ -0,0 +1,102 @@ +[role="xpack"] +[testenv="basic"] +[[sql-like-rlike-operators]] +=== LIKE and RLIKE Operators + +`LIKE` and `RLIKE` operators are commonly used to filter data based on string patterns. They usually act on a field placed on the left-hand side of +the operator, but can also act on a constant (literal) expression. The right-hand side of the operator represents the pattern. +Both can be used in the `WHERE` clause of the `SELECT` statement, but `LIKE` can also be used in other places, such as defining an +<> or across various <>. +This section covers only the `SELECT ... WHERE ...` usage. + +NOTE: One significant difference between `LIKE`/`RLIKE` and the <> is that the former +act on <> while the latter also work on <> fields. If the field used with `LIKE`/`RLIKE` doesn't +have an exact not-normalized sub-field (of <> type) {es-sql} will not be able to run the query. If the field is either exact +or has an exact sub-field, it will use it as is, or it will automatically use the exact sub-field even if it wasn't explicitly specified in the statement. + +[[sql-like-operator]] +==== `LIKE` + +.Synopsis: +[source, sql] +-------------------------------------------------- +expression<1> LIKE constant_exp<2> +-------------------------------------------------- + +<1> typically a field, or a constant expression +<2> pattern + +.Description: + +The SQL `LIKE` operator is used to compare a value to similar values using wildcard operators. 
There are two wildcards used in conjunction +with the `LIKE` operator: + +* The percent sign (%) +* The underscore (_) + +The percent sign represents zero, one or multiple characters. The underscore represents a single number or character. These symbols can be +used in combinations. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs/docs.csv-spec[simpleLike] +---- + +There is, also, the possibility of using an escape character if one needs to match the wildcard characters themselves. This can be done +by using the `ESCAPE [escape_character]` statement after the `LIKE ...` operator: + + SELECT name, author FROM library WHERE name LIKE 'Dune/%' ESCAPE '/'; + +In the example above `/` is defined as an escape character which needs to be placed before the `%` or `_` characters if one needs to +match those characters in the pattern specifically. By default, there is no escape character defined. + +IMPORTANT: Even though `LIKE` is a valid option when searching or filtering in {es-sql}, full-text search predicates +`MATCH` and `QUERY` are <>. + +[[sql-rlike-operator]] +==== `RLIKE` + +.Synopsis: +[source, sql] +-------------------------------------------------- +expression<1> RLIKE constant_exp<2> +-------------------------------------------------- + +<1> typically a field, or a constant expression +<2> pattern + +.Description: + +This operator is similar to `LIKE`, but the user is not limited to search for a string based on a fixed pattern with the percent sign (`%`) +and underscore (`_`); the pattern in this case is a regular expression which allows the construction of more flexible patterns. + +For more details about the regular expressions syntax, https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/regex/Pattern.html[Java's Pattern class javadoc] +is a good starting point. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs/docs.csv-spec[simpleRLike] +---- + +IMPORTANT: Even though `RLIKE` is a valid option when searching or filtering in {es-sql}, full-text search predicates +`MATCH` and `QUERY` are <>. + +[[sql-like-prefer-full-text]] +==== Prefer full-text search predicates + +When using `LIKE`/`RLIKE`, do consider using <> which are faster, much more powerful +and offer the option of sorting by relevancy (results can be returned based on how well they matched). 
+ +For example: + +[cols="`) +[[sql-operators-null-safe-equality]] +==== `Null safe Equality (<=>)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -24,35 +26,40 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[nullEqualsCompareWithNull] include-tagged::{sql-specs}/docs/docs.csv-spec[nullEqualsCompareTwoNulls] -------------------------------------------------- -* Inequality (`<>` or `!=`) +[[sql-operators-inequality]] +==== `Inequality (<> or !=)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldNonEquality] -------------------------------------------------- -* Comparison (`<`, `<=`, `>`, `>=`) +[[sql-operators-comparison]] +==== `Comparison (<, <=, >, >=)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldLessThan] -------------------------------------------------- -* `BETWEEN` +[[sql-operators-between]] +==== `BETWEEN` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereBetween] -------------------------------------------------- -* `IS NULL/IS NOT NULL` +[[sql-operators-is-null]] +==== `IS NULL/IS NOT NULL` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull] -------------------------------------------------- -* `IN (, , ...)` +[[sql-operators-in]] +==== `IN (, , ...)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -64,21 +71,24 @@ include-tagged::{sql-specs}/filter.sql-spec[whereWithInAndMultipleValues] Boolean operator for evaluating one or two expressions. -* `AND` +[[sql-operators-and]] +==== `AND` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldAndComparison] -------------------------------------------------- -* `OR` +[[sql-operators-or]] +==== `OR` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/filter.sql-spec[whereFieldOrComparison] -------------------------------------------------- -* `NOT` +[[sql-operators-not]] +==== `NOT` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -91,42 +101,48 @@ include-tagged::{sql-specs}/filter.sql-spec[whereFieldEqualityNot] Perform mathematical operations affecting one or two values. The result is a value of numeric type. 
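A note on the null-safe equality operator anchored above: unlike plain `=`, which yields `NULL` when either operand is `NULL`, `<=>` treats two `NULL` operands as equal. In Java terms its semantics match `Objects.equals`, as this small sketch illustrates:

----
import java.util.Objects;

public class NullSafeEqualsDemo {
    public static void main(String[] args) {
        String a = null;
        String b = null;
        // SQL: null = null    -> NULL (unknown)
        // SQL: null <=> null  -> true, like Objects.equals
        System.out.println(Objects.equals(a, b));          // true
        System.out.println(Objects.equals(a, "foo"));      // false
        System.out.println(Objects.equals("foo", "foo"));  // true
    }
}
----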
-* Add (`+`) +[[sql-operators-plus]] +==== `Add (+)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[plus] -------------------------------------------------- -* Subtract (infix `-`) +[[sql-operators-subtract]] +==== `Subtract (infix -)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[minus] -------------------------------------------------- -* Negate (unary `-`) +[[sql-operators-negate]] +==== `Negate (unary -)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[unaryMinus] -------------------------------------------------- -* Multiply (`*`) +[[sql-operators-multiply]] +==== `Multiply (*)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[multiply] -------------------------------------------------- -* Divide (`/`) +[[sql-operators-divide]] +==== `Divide (/)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/arithmetic.sql-spec[divide] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Modulo_operation[Modulo] or Remainder(`%`) +[[sql-operators-remainder]] +==== `Modulo or Remainder(%)` ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -136,7 +152,8 @@ include-tagged::{sql-specs}/arithmetic.sql-spec[mod] [[sql-operators-cast]] === Cast Operators -* Cast (`::`) +[[sql-operators-cast-cast]] +==== `Cast (::)` `::` provides an alternative syntax to the <> function. diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index 93a693a69b476..6c57f19cbbece 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -2495,3 +2495,25 @@ SELECT first_name, last_name FROM emp WHERE last_name NOT LIKE '%a%' AND first_n Anoosh |Peyn Arumugam |Ossenbruggen ; + +simpleLikeOperator +// tag::simpleLike +SELECT author, name FROM library WHERE name LIKE 'Dune%'; + + author | name +---------------+--------------- +Frank Herbert |Dune +Frank Herbert |Dune Messiah +// end::simpleLike +; + +simpleRLikeOperator +// tag::simpleRLike +SELECT author, name FROM library WHERE name RLIKE 'Child.* Dune'; + + author | name +---------------+---------------- +Frank Herbert |Children of Dune +// end::simpleRLike +; + From 1bba887d481b49db231a1442922f1813952dcc67 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 1 Apr 2019 18:10:51 +0300 Subject: [PATCH 63/63] SQL: have LIKE/RLIKE use wildcard and regexp queries (#40628) * Have LIKE and RLIKE only use term-level queries (wildcard and regexp respectively). They are already working only with exact fields, thus be in-line with how SQL works in general (what you index is what you search on). 
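In query-DSL terms this means a predicate such as `keyword LIKE 'X%'` now compiles to a term-level `wildcard` query on the exact field instead of going through the analyzed `query_string` machinery. A rough sketch of the two shapes using the `QueryBuilders` helper; the field name is illustrative:

----
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class LikeTranslationDemo {
    public static void main(String[] args) {
        // New translation: term-level queries that match on the exact indexed value.
        QueryBuilder like = QueryBuilders.wildcardQuery("keyword", "X*");    // keyword LIKE 'X%'
        QueryBuilder rlike = QueryBuilders.regexpQuery("keyword", ".*a.*");  // keyword RLIKE '.*a.*'
        // Old translation: the analyzed query_string query.
        QueryBuilder old = QueryBuilders.queryStringQuery("X*").field("keyword");
        System.out.println(like + "\n" + rlike + "\n" + old);
    }
}
----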
--- .../xpack/sql/expression/FieldAttribute.java | 2 +- .../xpack/sql/planner/QueryTranslator.java | 24 ++----- .../analyzer/VerifierErrorMessagesTests.java | 8 ++- .../sql/planner/QueryTranslatorTests.java | 68 ++++++++++++------- 4 files changed, 57 insertions(+), 45 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java index 811cc299ccb97..cb86e2742b2d8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java @@ -88,7 +88,7 @@ public EsField.Exact getExactInfo() { public FieldAttribute exactAttribute() { EsField exactField = field.getExactField(); if (exactField.equals(field) == false) { - return innerField(field.getExactField()); + return innerField(exactField); } return this; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 1fdd27d9b0b2d..1ad5f812777b2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -24,9 +24,9 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.ExtendedStats; import org.elasticsearch.xpack.sql.expression.function.aggregate.First; import org.elasticsearch.xpack.sql.expression.function.aggregate.Last; -import org.elasticsearch.xpack.sql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.sql.expression.function.aggregate.MatrixStats; import org.elasticsearch.xpack.sql.expression.function.aggregate.Max; +import org.elasticsearch.xpack.sql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.sql.expression.function.aggregate.Min; import org.elasticsearch.xpack.sql.expression.function.aggregate.PercentileRanks; import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentiles; @@ -470,7 +470,6 @@ private static String topAggsField(AggregateFunction af, Expression e) { af.nodeString()); } - // TODO: need to optimize on ngram // TODO: see whether escaping is needed @SuppressWarnings("rawtypes") static class Likes extends ExpressionTranslator { @@ -478,34 +477,23 @@ static class Likes extends ExpressionTranslator { @Override protected QueryTranslation asQuery(RegexMatch e, boolean onAggs) { Query q = null; - boolean inexact = true; - String target = null; + String targetFieldName = null; if (e.field() instanceof FieldAttribute) { - target = nameOf(((FieldAttribute) e.field()).exactAttribute()); + targetFieldName = nameOf(((FieldAttribute) e.field()).exactAttribute()); } else { - throw new SqlIllegalArgumentException("Scalar function ({}) not allowed (yet) as arguments for LIKE", + throw new SqlIllegalArgumentException("Scalar function [{}] not allowed (yet) as argument for " + e.functionName(), Expressions.name(e.field())); } if (e instanceof Like) { LikePattern p = ((Like) e).pattern(); - if (inexact) { - q = new QueryStringQuery(e.source(), p.asLuceneWildcard(), target); - } - else { - q = new WildcardQuery(e.source(), nameOf(e.field()), p.asLuceneWildcard()); - } + q = new WildcardQuery(e.source(), targetFieldName, p.asLuceneWildcard()); } if (e instanceof RLike) { String pattern = 
((RLike) e).pattern(); - if (inexact) { - q = new QueryStringQuery(e.source(), "/" + pattern + "/", target); - } - else { - q = new RegexQuery(e.source(), nameOf(e.field()), pattern); - } + q = new RegexQuery(e.source(), targetFieldName, pattern); } return q != null ? new QueryTranslation(wrapIfNested(q, e.field())) : null; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 9d55d4aeec7b5..c76ddffe437c6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -535,12 +535,18 @@ public void testInvalidTypeForFunction_WithFourArgs() { error("SELECT INSERT('text', 1, 2, 3)")); } - public void testInvalidTypeForRegexMatch() { + public void testInvalidTypeForLikeMatch() { assertEquals("1:26: [text LIKE 'foo'] cannot operate on field of data type [text]: " + "No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", error("SELECT * FROM test WHERE text LIKE 'foo'")); } + public void testInvalidTypeForRLikeMatch() { + assertEquals("1:26: [text RLIKE 'foo'] cannot operate on field of data type [text]: " + + "No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", + error("SELECT * FROM test WHERE text RLIKE 'foo'")); + } + public void testAllowCorrectFieldsInIncompatibleMappings() { assertNotNull(incompatibleAccept("SELECT languages FROM \"*\"")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index a39b5466bc10f..d1408688427e5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -41,11 +41,12 @@ import org.elasticsearch.xpack.sql.querydsl.query.ExistsQuery; import org.elasticsearch.xpack.sql.querydsl.query.NotQuery; import org.elasticsearch.xpack.sql.querydsl.query.Query; -import org.elasticsearch.xpack.sql.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery; +import org.elasticsearch.xpack.sql.querydsl.query.RegexQuery; import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery; +import org.elasticsearch.xpack.sql.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; @@ -186,20 +187,41 @@ public void testLikeOnInexact() { assertTrue(p instanceof Filter); Expression condition = ((Filter) p).condition(); QueryTranslation qt = QueryTranslator.toQuery(condition, false); - assertEquals(QueryStringQuery.class, qt.query.getClass()); - QueryStringQuery qsq = ((QueryStringQuery) qt.query); - assertEquals(1, qsq.fields().size()); - assertEquals("some.string.typical", qsq.fields().keySet().iterator().next()); + assertEquals(WildcardQuery.class, qt.query.getClass()); + WildcardQuery qsq = ((WildcardQuery) qt.query); + assertEquals("some.string.typical", 
qsq.field()); + } + + public void testRLikeOnInexact() { + LogicalPlan p = plan("SELECT * FROM test WHERE some.string RLIKE '.*a.*'"); + assertTrue(p instanceof Project); + p = ((Project) p).child(); + assertTrue(p instanceof Filter); + Expression condition = ((Filter) p).condition(); + QueryTranslation qt = QueryTranslator.toQuery(condition, false); + assertEquals(RegexQuery.class, qt.query.getClass()); + RegexQuery qsq = ((RegexQuery) qt.query); + assertEquals("some.string.typical", qsq.field()); } public void testLikeConstructsNotSupported() { - LogicalPlan p = plan("SELECT LTRIM(keyword) lt FROM test WHERE LTRIM(keyword) LIKE '%a%'"); + LogicalPlan p = plan("SELECT LTRIM(keyword) lt FROM test WHERE LTRIM(keyword) like '%a%'"); + assertTrue(p instanceof Project); + p = ((Project) p).child(); + assertTrue(p instanceof Filter); + Expression condition = ((Filter) p).condition(); + SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> QueryTranslator.toQuery(condition, false)); + assertEquals("Scalar function [LTRIM(keyword)] not allowed (yet) as argument for LIKE", ex.getMessage()); + } + + public void testRLikeConstructsNotSupported() { + LogicalPlan p = plan("SELECT LTRIM(keyword) lt FROM test WHERE LTRIM(keyword) RLIKE '.*a.*'"); assertTrue(p instanceof Project); p = ((Project) p).child(); assertTrue(p instanceof Filter); Expression condition = ((Filter) p).condition(); SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> QueryTranslator.toQuery(condition, false)); - assertEquals("Scalar function (LTRIM(keyword)) not allowed (yet) as arguments for LIKE", ex.getMessage()); + assertEquals("Scalar function [LTRIM(keyword)] not allowed (yet) as argument for RLIKE", ex.getMessage()); } public void testDifferentLikeAndNotLikePatterns() { @@ -213,20 +235,18 @@ public void testDifferentLikeAndNotLikePatterns() { assertEquals(BoolQuery.class, qt.query.getClass()); BoolQuery bq = ((BoolQuery) qt.query); assertTrue(bq.isAnd()); - assertTrue(bq.left() instanceof QueryStringQuery); + assertTrue(bq.left() instanceof WildcardQuery); assertTrue(bq.right() instanceof NotQuery); NotQuery nq = (NotQuery) bq.right(); - assertTrue(nq.child() instanceof QueryStringQuery); - QueryStringQuery lqsq = (QueryStringQuery) bq.left(); - QueryStringQuery rqsq = (QueryStringQuery) nq.child(); + assertTrue(nq.child() instanceof WildcardQuery); + WildcardQuery lqsq = (WildcardQuery) bq.left(); + WildcardQuery rqsq = (WildcardQuery) nq.child(); assertEquals("X*", lqsq.query()); - assertEquals(1, lqsq.fields().size()); - assertEquals("keyword", lqsq.fields().keySet().iterator().next()); + assertEquals("keyword", lqsq.field()); assertEquals("Y*", rqsq.query()); - assertEquals(1, rqsq.fields().size()); - assertEquals("keyword", rqsq.fields().keySet().iterator().next()); + assertEquals("keyword", rqsq.field()); } public void testRLikePatterns() { @@ -248,20 +268,18 @@ private void assertDifferentRLikeAndNotRLikePatterns(String firstPattern, String assertEquals(BoolQuery.class, qt.query.getClass()); BoolQuery bq = ((BoolQuery) qt.query); assertTrue(bq.isAnd()); - assertTrue(bq.left() instanceof QueryStringQuery); + assertTrue(bq.left() instanceof RegexQuery); assertTrue(bq.right() instanceof NotQuery); NotQuery nq = (NotQuery) bq.right(); - assertTrue(nq.child() instanceof QueryStringQuery); - QueryStringQuery lqsq = (QueryStringQuery) bq.left(); - QueryStringQuery rqsq = (QueryStringQuery) nq.child(); + assertTrue(nq.child() instanceof RegexQuery); + 
RegexQuery lqsq = (RegexQuery) bq.left(); + RegexQuery rqsq = (RegexQuery) nq.child(); - assertEquals("/" + firstPattern + "/", lqsq.query()); - assertEquals(1, lqsq.fields().size()); - assertEquals("keyword", lqsq.fields().keySet().iterator().next()); - assertEquals("/" + secondPattern + "/", rqsq.query()); - assertEquals(1, rqsq.fields().size()); - assertEquals("keyword", rqsq.fields().keySet().iterator().next()); + assertEquals(firstPattern, lqsq.regex()); + assertEquals("keyword", lqsq.field()); + assertEquals(secondPattern, rqsq.regex()); + assertEquals("keyword", rqsq.field()); } public void testTranslateNotExpression_WhereClause_Painless() {
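The `"X*"` expected by the tests above comes from rewriting the SQL pattern into Lucene wildcard syntax: `%` becomes `*` and `_` becomes `?`. A minimal sketch of that rewriting, ignoring the optional `ESCAPE` character and escaping Lucene's own wildcards; it mirrors what the `LikePattern#asLuceneWildcard` conversion produces but is not the actual implementation:

----
final class LikePatterns {
    // Convert a SQL LIKE pattern to a Lucene wildcard expression:
    // '%' -> '*', '_' -> '?', while escaping literal '*', '?' and '\'.
    static String toLuceneWildcard(String sqlPattern) {
        StringBuilder out = new StringBuilder(sqlPattern.length());
        for (char c : sqlPattern.toCharArray()) {
            switch (c) {
                case '%': out.append('*'); break;
                case '_': out.append('?'); break;
                case '*':
                case '?':
                case '\\': out.append('\\').append(c); break;
                default: out.append(c);
            }
        }
        return out.toString();
    }

    public static void main(String[] args) {
        System.out.println(toLuceneWildcard("X%"));    // X*
        System.out.println(toLuceneWildcard("f_o%"));  // f?o*
    }
}
----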